# Record the R version, platform, locale, and loaded package versions for
# reproducibility; the "##" lines below are the output captured at render time.
sessionInfo()
## R version 3.5.1 (2018-07-02)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17134)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.1  magrittr_1.5    tools_3.5.1     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.20      stringr_1.3.1   digest_0.6.18   evaluate_0.12

User Inputs

# Name of the response variable to model, taken from the report parameters.
output.var <- params$output.var

# Transformation flags derived from params$trans:
#   1 = absolute-value transform, 2 = log scale, 3 = normalized scale,
#   anything else = no transformation. Exactly one flag becomes TRUE.
transform.abs <- FALSE
log.pred <- FALSE
norm.pred <- FALSE
if (params$trans == 1) {
  # BUG FIX: the original wrote `transform.abs == TRUE` — a comparison whose
  # result was discarded, so the flag was never actually set. Assign instead.
  transform.abs <- TRUE
} else if (params$trans == 2) {
  log.pred <- TRUE
} else if (params$trans == 3) {
  norm.pred <- TRUE
} else {
  message("You have chosen no transformation")
}

# Copy the analysis switches out of `params` into top-level variables.
# Each target variable name matches its parameter name exactly, so a single
# bulk copy via list2env() reproduces the original one-by-one assignments:
#   eda                  — run exploratory data analysis
#   algo.*               — custom algorithm selection flags
#   algo.*.caret         — caret-based algorithm selection flags
list2env(
  params[c(
    "eda",
    "algo.forward", "algo.backward", "algo.stepwise",
    "algo.LASSO", "algo.LARS",
    "algo.forward.caret", "algo.backward.caret", "algo.stepwise.caret",
    "algo.LASSO.caret", "algo.LARS.caret"
  )],
  envir = environment()
)

# Echo the full parameter list so the rendered report documents exactly which
# settings produced the results below (str() prints types and values).
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 13
##  $ output.var         : chr "y3"
##  $ trans              : int 3
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# label.names          = name of the response column used for training
# alt.scale.label.name = Alternate Scale variable name
#   - if predicting on log, then alt.scale is normal scale
#   - if predicting on normal scale, then alt.scale is log scale
#
# The three flags are mutually exclusive (set from params$trans above), so the
# original three independent `if` statements collapse into one if/else chain.
# This also removes the vectorized `&` that was used inside a scalar `if`
# condition — in a chain no compound condition is needed at all.
if (log.pred) {
  label.names <- paste('log.', output.var, sep = "")
  alt.scale.label.name <- output.var
} else if (norm.pred) {
  label.names <- paste('norm.', output.var, sep = "")
  alt.scale.label.name <- output.var
} else {
  # Neither log nor normalized prediction: train on the raw scale; the
  # alternate scale is the log-transformed response.
  label.names <- output.var
  alt.scale.label.name <- paste('log.', output.var, sep = "")
}

Prepare Data

Read and Clean Features

# Load the feature matrices: a standard-precision export and a high-precision
# variant of the same data.
# NOTE(review): relative paths assume the working directory sits two levels
# below the project root (../../Data) — confirm before running elsewhere.
features = read.csv("../../Data/features.csv")
features.highprec = read.csv("../../Data/features_highprec.csv")
# Compare the two versions; the "##" output below reports only small mean
# relative differences per column, i.e. rounding-level discrepancies rather
# than structural mismatches.
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first six rows of the standard-precision feature set
# (JobName identifier, x1..x23 inputs, stat1..stat217 statistics).
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the high-precision feature set for side-by-side comparison with the
# standard-precision preview above (note the extra digits in e.g. x11).
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set (built earlier in the report) for all
# downstream modeling.
features = features.highprec
#str(features) 

Checking correlations to evaluate removal of redundant features

# Pairwise Pearson correlations (rounded to 2 dp) across all numeric
# feature columns.
numeric.features <- features[sapply(features, is.numeric)]
corr.matrix <- round(cor(numeric.features), 2)

# Keep only the variables involved in at least one strong correlation
# (|r| >= threshold), zeroing the diagonal so self-correlation is ignored.
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Model features: every column of `features` except the JobName identifier.
all.cols <- colnames(features)
drops <- c('JobName')
feature.names <- all.cols[!(all.cols %in% drops)]
#str(feature.names)

Read and Clean Labels

# Load the response data and keep only the job identifier plus the selected
# output variable (output.var comes from the report parameters).
labels = read.csv("../../Data/labels.csv")
#str(labels)
# NOTE: about a quarter of the labels are NA (see summary below); those rows
# are removed later by the complete.cases() step.
labels = labels[,c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Join features to labels on the JobName key, then drop the key column so
# `data` contains only predictors plus the response.
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
keep.cols <- !(colnames(data) %in% drops)
data <- data[, keep.cols]
#str(data)

Transformations

# Apply the label transformation selected via params$trans.
# NOTE(review): label.names and alt.scale.label.name are defined earlier in
# the report (outside this chunk) — confirm their values before editing.

# trans == 1: convert dB-style labels to absolute scale (10^(y/20)).
# NOTE(review): the parameter chunk at the top of the report uses
# `transform.abs == TRUE` (comparison, not assignment), so this branch is
# currently never reached — confirm and fix upstream.
if (transform.abs == TRUE){
  data[,label.names] = 10^(data[,label.names]/20)
  #data = filter(data, y3 < 1E7)
}
# trans == 2: model log10 of the label; the untransformed column is dropped.
if (log.pred == TRUE){
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}

t = NULL # initialize to NULL for other cases
# trans == 3: normalize the label with bestNormalize; `t` keeps the fitted
# transformation so predictions can be inverse-transformed later.
# NOTE(review): `t` shadows base::t (matrix transpose) in the global
# environment — consider renaming.
if (norm.pred){
  t = bestNormalize::bestNormalize(data[[alt.scale.label.name]])
  data[label.names] = predict(t)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
## Warning in orderNorm(standardize = TRUE, warn = TRUE, x = c(121.2556129, : Ties in data, Normal distribution not guaranteed
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

if (eda == TRUE){
  # Correlation of every feature column against the (transformed) label,
  # rounded to 4 dp and shown as an interactive table.
  corr.to.label =round(cor(dplyr::select(data,-one_of(label.names)),dplyr::select_at(data,label.names)),4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

if (eda == TRUE){
  # Variance inflation factors for the features, largest first.
  # NOTE(review): select_at/arrange/%>% are used unqualified; assumes dplyr
  # (and magrittr) are attached earlier in the document — confirm.
  vifDF = usdm::vif(select_at(data,feature.names)) %>% arrange(desc(VIF))
  head(vifDF,10)
}

Scatterplots

# Panel function for pairs(): draws a histogram of x inside the current
# panel, with counts rescaled so the tallest bar has height 1.  The panel's
# user coordinates are changed temporarily and restored via on.exit().
panel.hist <- function(x, ...)
{
    usr <- par("usr"); on.exit(par(usr))
    par(usr = c(usr[1:2], 0, 1.5))
    h <- hist(x, plot = FALSE)
    bks <- h$breaks
    nb <- length(bks)
    bar.heights <- h$counts / max(h$counts)
    rect(bks[-nb], 0, bks[-1], bar.heights, col = "cyan", ...)
}
if (eda == TRUE){
  # Histogram of the (possibly transformed) label column.
  # NOTE(review): histogram() looks like lattice's; assumes lattice is
  # attached earlier in the document — confirm.
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Draw one scatter plot of yvar against each x variable (one chart per
# feature).
#
# data  - data.frame holding all columns
# xvars - character vector of predictor column names; defaults to every
#         column except yvar
# yvar  - name of the response column (y axis on every chart)
#
# Returns NULL invisibly; called for its plotting side effects.
ind.pairs.plot <- function(data, xvars=NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- setdiff(names(data), yvar)
    }

    # Iterate over names directly: the original `for (i in 1:ncharts)`
    # misbehaved when xvars was empty (1:0 counts down).
    for (xv in xvars) {
        plot(data[, xv], data[, yvar], xlab = xv, ylab = yvar)
    }
    invisible(NULL)
}

if (eda == TRUE){
  # One scatter plot per feature against the label.
  ind.pairs.plot(data, feature.names, label.names)
}

# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

if(eda ==FALSE){
  # x18 may need transformations
  # Compare the label against x18 and sqrt(x18); x18 is then replaced by
  # sqrt.x18 — presumably the sqrt scale looked more linear, confirm.
  # NOTE(review): the second plot's title still says "Original" even though
  # it shows the sqrt scale.
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# 80/20 train/test split.
# NOTE(review): sample.split() appears to be caTools'; assumes caTools is
# attached earlier in the document — confirm.
# NOTE(review): no set.seed() before these sampling calls, so the split (and
# all downstream results) differs on every render.
data = data[sample(nrow(data)),] # randomly shuffle data
split = sample.split(data[,label.names], SplitRatio = 0.8)

data.train = subset(data, split == TRUE)
data.test = subset(data, split == FALSE)

Common Functions

# Draw a standard suite of regression diagnostics for a fitted lm-style
# model:
#   * the model's own plot() diagnostics
#   * studentized and standardized residuals vs. fitted values
#   * histogram of studentized residuals with a N(0,1) curve overlaid
#   * leverage plot and Cook's distance plot with 4/n and 1.0 cutoffs
#
# model - fitted model (must support plot, predict, rstandard, rstudent,
#         cooks.distance and model.matrix)
# train - data the model was fitted on (used for predictions and for n)
#
# Returns the vector of Cook's distances.
plot.diagnostics <-  function(model, train) {
  plot(model)

  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Hoist the fitted values: used by both residual plots
  fitted.vals = predict(model, train)

  plot(fitted.vals, r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(h = 0)
  
  plot(fitted.vals, r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(h = 0)
  # +/- 2 bands: rough flag for large standardized residuals
  abline(h = 2)
  abline(h = -2)
  
  # Histogram of studentized residuals
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Overlay the standard normal density across the residual range
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)
  yfit <- dnorm(xfit)
  lines(xfit, yfit)
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influence measures (summary print suppressed: too much data for report)
  inf.meas = influence.measures(model)
  
  # Leverage plot
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's distance with the conventional 4/n and 1.0 cutoffs
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(h = 4/nrow(train))
  abline(h = 1)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# Train a feature-selection regression model through caret::train, with
# sensible default tuning grids per method, parallel cross-validation, and
# method-specific diagnostic plots.
#
# formula       - full model formula (caret/leaps selects subsets from it)
# data          - training data.frame
# method        - caret method: 'leapForward', 'leapBackward', 'leapSeq',
#                 'glmnet' (with subopt = 'LASSO'), or 'lars'
# subopt        - sub-option qualifying `method` (only 'LASSO' is used here)
# feature.names - candidate feature names (sizes the leaps nvmax grid)
# train.control - optional caret::trainControl (default: 10-fold CV, grid)
# pre.proc      - optional preProcess spec (the lars branch forces
#                 center/scale)
#
# Returns a list containing the fitted model, the best-model id (leaps
# methods only), and the residual/metric ggplot objects for the method.
#
# NOTE(review): scalar method comparisons below use `|` rather than `||`;
# works for length-1 strings but `||` would be the idiomatic scalar form.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # Default resampling: 10-fold CV with a grid search, parallel-enabled
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  # Method-specific default tuning grids (only built if caller gave none)
  if(is.null(tune.grid)){
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      # Try every subset size up to the full feature count
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      # alpha = 1 is the pure-LASSO elastic-net penalty
      alpha = c(1)
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # Seed is set immediately before train(); setting it earlier (outside this
  # function) did not give reproducible results for some reason.
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  
  # ---- leaps-based forward/backward/stepwise selection ----
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    print(model.caret$results) # all model results
    print(model.caret$bestTune) # best model
  
    model = model.caret$finalModel

    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    # Provides the coefficients of the best model
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
  # ---- glmnet LASSO ----
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    print(model.caret$results)
    
    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot 
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') +
      theme_light()
    plot(residHistogram)

    # NOTE(review): unlike the other branches this returns metricsPlot, not
    # residHistogram — confirm whether that asymmetry is intentional.
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  # ---- least-angle regression ----
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot
    dataPlot = model.caret$results %>%
        gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Predict from a regsubsets (leaps) fit of size `id`: build the full design
# matrix from the supplied formula, then use only the columns matching the
# selected model's coefficients.  The formula is passed explicitly because
# object$call[[2]] only held the symbol "formula", not its value.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    # model.matrix adds the intercept and expands any interaction terms
    design <- model.matrix(formula, newdata)
    coefi <- coef(object, id = id)
    selected <- names(coefi)
    return(design[, selected] %*% coefi)
}
  
# Evaluate a fitted model on the held-out test set: prints a summary of the
# predictions and the test MSE, then plots predicted vs. actual on the
# original (back-transformed) scale with +/- `good` and +/- `ok` slope bands.
#
# model          - fitted model object
# test           - test data.frame
# level          - confidence level for plain-lm prediction intervals
# draw.limits, good, ok - band parameters (good/ok are relative slopes)
# method/subopt  - caret method used for training (NULL for a plain lm)
# id             - model size for leaps-based methods (regsubsets)
# formula        - full formula (needed by predict.regsubsets)
# feature.names, label.names - column names in `test`
# transformation - bestNormalize object used when norm.pred is TRUE
#
# NOTE: relies on the globals log.pred / norm.pred (set at the top of the
# report) to know how to invert the label transformation.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality, 
  ## need to pass formula (full is ok as it will select subset of variables from there)

  # Guarded if/else chain.  The original fell through to bare
  # `method == '...'` comparisons even when method was NULL, which makes
  # `if` fail with a zero-length condition; the same happened in the glmnet
  # branch when subopt was NULL.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level) 
  } else if (method == 'leapForward' || method == 'leapBackward' || method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && !is.null(subopt) && subopt == 'LASSO'){
    xtest = as.matrix(test[,feature.names]) 
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  } else {
    # Fail loudly instead of the original's "object 'pred' not found"
    stop("Unsupported method/subopt combination: ", method, " ", subopt)
  }
    
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))
  
  if(log.pred == TRUE || norm.pred == TRUE){
    # plot transformed comparison first
    plot(test[,label.names],pred[,1],xlab = "Actual (Transformed)", ylab = "Predicted (Transformed)")
  }
    
  # Back-transform both actuals and predictions to the original label scale
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]  
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }

  plot(x, y, xlab = "Actual", ylab = "Predicted")
  # green: within +/- good (default 10%); blue: within +/- ok (default 15%)
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
}

Setup Formulae

# Build "label ~ all features" as the full main-effects formula, using the
# training-data column names.
n <- names(data.train)
 formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~", paste(n[!n %in% label.names], collapse = " + "))) 

# ind.interact = c("x4","x7","x8", "x9", "x10", "x11", "x14", "x16", "x17", "x21", "sqrt.x18")
# ind.nointeract = c("stat13", "stat14", "stat24", "stat60", "stat98", "stat110", "stat144", "stat149")
# 
# interact = paste(ind.interact, collapse = " + ")
# nointeract = paste(ind.nointeract, collapse = " + ")
# 
# # ^2 is 2 way interaction, ^3 is 3 way interaction
# formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " )^2 ", " + ", nointeract ))
# 
# # # * is all way interaction
# # formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " ) ", " + ", nointeract ))

# Intercept-only baseline ("grand mean") formula
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))

print(formula)
## norm.y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + 
##     x11 + x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + 
##     x22 + x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + 
##     stat7 + stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + 
##     stat14 + stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + 
##     stat21 + stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + 
##     stat28 + stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + 
##     stat35 + stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + 
##     stat42 + stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + 
##     stat49 + stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + 
##     stat56 + stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + 
##     stat63 + stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + 
##     stat70 + stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + 
##     stat77 + stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + 
##     stat84 + stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + 
##     stat91 + stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + 
##     stat98 + stat99 + stat100 + stat101 + stat102 + stat103 + 
##     stat104 + stat105 + stat106 + stat107 + stat108 + stat109 + 
##     stat110 + stat111 + stat112 + stat113 + stat114 + stat115 + 
##     stat116 + stat117 + stat118 + stat119 + stat120 + stat121 + 
##     stat122 + stat123 + stat124 + stat125 + stat126 + stat127 + 
##     stat128 + stat129 + stat130 + stat131 + stat132 + stat133 + 
##     stat134 + stat135 + stat136 + stat137 + stat138 + stat139 + 
##     stat140 + stat141 + stat142 + stat143 + stat144 + stat145 + 
##     stat146 + stat147 + stat148 + stat149 + stat150 + stat151 + 
##     stat152 + stat153 + stat154 + stat155 + stat156 + stat157 + 
##     stat158 + stat159 + stat160 + stat161 + stat162 + stat163 + 
##     stat164 + stat165 + stat166 + stat167 + stat168 + stat169 + 
##     stat170 + stat171 + stat172 + stat173 + stat174 + stat175 + 
##     stat176 + stat177 + stat178 + stat179 + stat180 + stat181 + 
##     stat182 + stat183 + stat184 + stat185 + stat186 + stat187 + 
##     stat188 + stat189 + stat190 + stat191 + stat192 + stat193 + 
##     stat194 + stat195 + stat196 + stat197 + stat198 + stat199 + 
##     stat200 + stat201 + stat202 + stat203 + stat204 + stat205 + 
##     stat206 + stat207 + stat208 + stat209 + stat210 + stat211 + 
##     stat212 + stat213 + stat214 + stat215 + stat216 + stat217 + 
##     sqrt.x18
print(grand.mean.formula)
## norm.y3 ~ 1
# Update feature.names because we may have transformed some features
# (e.g. sqrt.x18 appears in the formula above instead of x18); every
# column that is not a response label is treated as a feature.
feature.names = n[!n %in% label.names]

Full Model

# Fit the full OLS model: every candidate predictor, as specified by `formula`.
# The explicit `data =` matches the call shown in the printed summary.
model.full = lm(formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.6319 -0.5798 -0.0821  0.5194  3.9322 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -3.835e+00  2.480e-01 -15.460  < 2e-16 ***
## x1          -7.233e-03  1.702e-02  -0.425 0.670932    
## x2           1.029e-02  1.085e-02   0.948 0.343192    
## x3           1.859e-03  2.959e-03   0.628 0.529782    
## x4          -1.265e-03  2.327e-04  -5.435 5.69e-08 ***
## x5           6.741e-03  7.662e-03   0.880 0.378987    
## x6          -2.331e-03  1.551e-02  -0.150 0.880579    
## x7           3.414e-01  1.659e-02  20.579  < 2e-16 ***
## x8           9.206e-03  3.848e-03   2.392 0.016775 *  
## x9           9.742e-02  8.586e-03  11.347  < 2e-16 ***
## x10          3.911e-02  7.986e-03   4.897 9.98e-07 ***
## x11          5.334e+06  1.922e+06   2.775 0.005531 ** 
## x12         -4.101e-03  4.869e-03  -0.842 0.399655    
## x13          3.982e-03  1.935e-03   2.058 0.039670 *  
## x14         -8.619e-03  8.376e-03  -1.029 0.303522    
## x15          1.072e-03  8.005e-03   0.134 0.893451    
## x16          2.619e-02  5.536e-03   4.730 2.29e-06 ***
## x17          3.261e-02  8.476e-03   3.847 0.000121 ***
## x19          6.184e-03  4.276e-03   1.446 0.148154    
## x20         -1.092e-02  2.985e-02  -0.366 0.714618    
## x21          4.758e-03  1.099e-03   4.328 1.53e-05 ***
## x22         -1.411e-02  8.921e-03  -1.582 0.113799    
## x23         -3.187e-04  8.511e-03  -0.037 0.970127    
## stat1       -3.136e-03  6.432e-03  -0.488 0.625829    
## stat2        5.801e-03  6.372e-03   0.910 0.362640    
## stat3        1.181e-02  6.468e-03   1.826 0.067960 .  
## stat4       -1.393e-02  6.444e-03  -2.162 0.030674 *  
## stat5       -6.455e-03  6.471e-03  -0.998 0.318558    
## stat6       -6.446e-03  6.444e-03  -1.000 0.317206    
## stat7        1.590e-03  6.441e-03   0.247 0.805091    
## stat8       -3.295e-03  6.459e-03  -0.510 0.610003    
## stat9       -1.013e-04  6.422e-03  -0.016 0.987408    
## stat10      -7.420e-03  6.412e-03  -1.157 0.247266    
## stat11      -1.111e-02  6.499e-03  -1.709 0.087434 .  
## stat12       2.252e-03  6.404e-03   0.352 0.725165    
## stat13      -7.266e-03  6.437e-03  -1.129 0.259086    
## stat14      -2.467e-02  6.403e-03  -3.854 0.000118 ***
## stat15      -7.483e-03  6.391e-03  -1.171 0.241651    
## stat16       3.061e-03  6.419e-03   0.477 0.633498    
## stat17      -3.934e-03  6.367e-03  -0.618 0.536730    
## stat18      -6.051e-03  6.373e-03  -0.949 0.342452    
## stat19       5.262e-03  6.412e-03   0.821 0.411925    
## stat20      -9.923e-03  6.431e-03  -1.543 0.122879    
## stat21      -1.094e-03  6.479e-03  -0.169 0.865931    
## stat22      -7.535e-03  6.463e-03  -1.166 0.243706    
## stat23       1.464e-02  6.419e-03   2.281 0.022591 *  
## stat24      -1.080e-02  6.446e-03  -1.675 0.094020 .  
## stat25      -1.018e-02  6.433e-03  -1.583 0.113469    
## stat26      -7.021e-03  6.445e-03  -1.089 0.276060    
## stat27       2.518e-03  6.414e-03   0.393 0.694657    
## stat28       2.452e-03  6.438e-03   0.381 0.703273    
## stat29       6.505e-03  6.454e-03   1.008 0.313538    
## stat30       5.059e-03  6.521e-03   0.776 0.437895    
## stat31      -5.189e-03  6.472e-03  -0.802 0.422725    
## stat32       1.982e-03  6.499e-03   0.305 0.760363    
## stat33      -1.368e-02  6.438e-03  -2.124 0.033685 *  
## stat34       5.679e-03  6.445e-03   0.881 0.378276    
## stat35      -7.725e-03  6.405e-03  -1.206 0.227869    
## stat36       5.438e-03  6.372e-03   0.853 0.393445    
## stat37      -1.131e-02  6.512e-03  -1.736 0.082544 .  
## stat38       8.730e-03  6.427e-03   1.358 0.174372    
## stat39      -6.666e-03  6.387e-03  -1.044 0.296662    
## stat40       1.082e-03  6.429e-03   0.168 0.866414    
## stat41      -1.732e-02  6.393e-03  -2.710 0.006756 ** 
## stat42      -8.632e-03  6.417e-03  -1.345 0.178633    
## stat43      -1.146e-02  6.443e-03  -1.779 0.075360 .  
## stat44       8.450e-03  6.386e-03   1.323 0.185826    
## stat45      -5.952e-03  6.431e-03  -0.926 0.354682    
## stat46       9.941e-03  6.443e-03   1.543 0.122903    
## stat47       4.048e-03  6.490e-03   0.624 0.532810    
## stat48       4.397e-03  6.423e-03   0.685 0.493629    
## stat49       4.762e-03  6.383e-03   0.746 0.455678    
## stat50       5.713e-03  6.389e-03   0.894 0.371272    
## stat51       6.229e-03  6.422e-03   0.970 0.332110    
## stat52      -3.701e-03  6.432e-03  -0.575 0.565047    
## stat53      -2.280e-03  6.485e-03  -0.352 0.725203    
## stat54      -1.029e-02  6.448e-03  -1.596 0.110625    
## stat55       8.917e-03  6.355e-03   1.403 0.160609    
## stat56      -1.568e-03  6.447e-03  -0.243 0.807870    
## stat57       3.190e-03  6.375e-03   0.500 0.616768    
## stat58      -1.119e-03  6.398e-03  -0.175 0.861183    
## stat59       7.338e-04  6.438e-03   0.114 0.909257    
## stat60       1.458e-02  6.432e-03   2.267 0.023438 *  
## stat61      -3.006e-03  6.460e-03  -0.465 0.641703    
## stat62      -6.957e-03  6.437e-03  -1.081 0.279865    
## stat63       4.627e-03  6.423e-03   0.720 0.471351    
## stat64       6.275e-05  6.380e-03   0.010 0.992153    
## stat65      -2.031e-03  6.433e-03  -0.316 0.752229    
## stat66       7.616e-03  6.507e-03   1.170 0.241859    
## stat67       5.753e-03  6.483e-03   0.887 0.374937    
## stat68      -8.286e-04  6.454e-03  -0.128 0.897853    
## stat69      -3.988e-04  6.443e-03  -0.062 0.950650    
## stat70       1.363e-03  6.414e-03   0.212 0.831760    
## stat71       3.471e-03  6.410e-03   0.541 0.588187    
## stat72      -1.285e-03  6.434e-03  -0.200 0.841653    
## stat73       1.195e-02  6.447e-03   1.853 0.063898 .  
## stat74      -4.099e-03  6.472e-03  -0.633 0.526530    
## stat75      -1.802e-03  6.477e-03  -0.278 0.780825    
## stat76       6.068e-03  6.423e-03   0.945 0.344880    
## stat77      -4.568e-03  6.447e-03  -0.709 0.478653    
## stat78      -4.082e-03  6.480e-03  -0.630 0.528704    
## stat79       3.659e-04  6.454e-03   0.057 0.954787    
## stat80       1.004e-02  6.456e-03   1.555 0.120110    
## stat81       6.065e-03  6.446e-03   0.941 0.346849    
## stat82       3.653e-03  6.390e-03   0.572 0.567588    
## stat83      -3.894e-03  6.422e-03  -0.606 0.544268    
## stat84      -3.232e-03  6.453e-03  -0.501 0.616516    
## stat85      -6.129e-03  6.462e-03  -0.948 0.342996    
## stat86       2.321e-03  6.460e-03   0.359 0.719345    
## stat87      -1.301e-02  6.453e-03  -2.017 0.043779 *  
## stat88      -8.228e-03  6.380e-03  -1.290 0.197231    
## stat89      -8.411e-03  6.396e-03  -1.315 0.188556    
## stat90      -5.208e-03  6.437e-03  -0.809 0.418486    
## stat91      -1.303e-02  6.356e-03  -2.050 0.040384 *  
## stat92      -1.683e-02  6.451e-03  -2.608 0.009119 ** 
## stat93      -3.401e-03  6.451e-03  -0.527 0.598028    
## stat94      -7.454e-03  6.451e-03  -1.156 0.247926    
## stat95      -2.965e-04  6.459e-03  -0.046 0.963388    
## stat96      -7.695e-03  6.391e-03  -1.204 0.228681    
## stat97      -1.005e-04  6.360e-03  -0.016 0.987392    
## stat98       1.035e-01  6.311e-03  16.397  < 2e-16 ***
## stat99       3.297e-03  6.511e-03   0.506 0.612661    
## stat100      1.544e-02  6.447e-03   2.395 0.016648 *  
## stat101     -3.028e-03  6.471e-03  -0.468 0.639870    
## stat102      1.842e-03  6.440e-03   0.286 0.774832    
## stat103     -8.171e-03  6.498e-03  -1.257 0.208697    
## stat104     -5.479e-03  6.401e-03  -0.856 0.392062    
## stat105      6.867e-03  6.364e-03   1.079 0.280565    
## stat106     -6.228e-03  6.382e-03  -0.976 0.329216    
## stat107      9.862e-04  6.432e-03   0.153 0.878143    
## stat108     -4.720e-03  6.436e-03  -0.733 0.463345    
## stat109      4.534e-03  6.407e-03   0.708 0.479156    
## stat110     -9.551e-02  6.391e-03 -14.944  < 2e-16 ***
## stat111     -3.954e-03  6.446e-03  -0.613 0.539646    
## stat112     -2.024e-03  6.476e-03  -0.313 0.754666    
## stat113     -2.050e-04  6.485e-03  -0.032 0.974782    
## stat114      4.732e-03  6.409e-03   0.738 0.460367    
## stat115      8.549e-03  6.418e-03   1.332 0.182895    
## stat116      9.455e-03  6.456e-03   1.465 0.143087    
## stat117      3.403e-03  6.469e-03   0.526 0.598865    
## stat118     -9.630e-03  6.392e-03  -1.507 0.131934    
## stat119      3.087e-03  6.428e-03   0.480 0.631117    
## stat120      3.195e-04  6.367e-03   0.050 0.959983    
## stat121     -8.602e-03  6.455e-03  -1.333 0.182725    
## stat122     -5.817e-03  6.404e-03  -0.908 0.363741    
## stat123      2.419e-03  6.510e-03   0.372 0.710167    
## stat124     -6.335e-03  6.444e-03  -0.983 0.325557    
## stat125      6.947e-03  6.443e-03   1.078 0.281011    
## stat126      1.043e-02  6.382e-03   1.634 0.102241    
## stat127      4.574e-03  6.422e-03   0.712 0.476364    
## stat128     -1.078e-02  6.423e-03  -1.679 0.093225 .  
## stat129     -5.211e-03  6.425e-03  -0.811 0.417400    
## stat130      5.040e-03  6.479e-03   0.778 0.436649    
## stat131      4.850e-03  6.447e-03   0.752 0.451909    
## stat132     -4.909e-03  6.394e-03  -0.768 0.442675    
## stat133     -3.181e-03  6.439e-03  -0.494 0.621323    
## stat134     -6.248e-03  6.418e-03  -0.974 0.330334    
## stat135     -5.217e-04  6.458e-03  -0.081 0.935620    
## stat136      1.498e-03  6.467e-03   0.232 0.816828    
## stat137      3.729e-03  6.416e-03   0.581 0.561159    
## stat138      1.642e-03  6.424e-03   0.256 0.798308    
## stat139      5.367e-03  6.446e-03   0.833 0.405074    
## stat140     -1.504e-03  6.387e-03  -0.235 0.813898    
## stat141      7.460e-03  6.377e-03   1.170 0.242119    
## stat142     -2.202e-03  6.509e-03  -0.338 0.735093    
## stat143      4.968e-03  6.470e-03   0.768 0.442609    
## stat144      1.755e-02  6.428e-03   2.730 0.006349 ** 
## stat145      7.676e-04  6.482e-03   0.118 0.905745    
## stat146     -1.712e-02  6.426e-03  -2.664 0.007743 ** 
## stat147     -5.558e-03  6.503e-03  -0.855 0.392782    
## stat148     -9.068e-03  6.377e-03  -1.422 0.155050    
## stat149     -9.940e-03  6.442e-03  -1.543 0.122894    
## stat150      2.899e-03  6.476e-03   0.448 0.654443    
## stat151     -3.044e-03  6.501e-03  -0.468 0.639631    
## stat152     -4.849e-03  6.420e-03  -0.755 0.450051    
## stat153      9.859e-04  6.504e-03   0.152 0.879510    
## stat154      4.942e-04  6.518e-03   0.076 0.939568    
## stat155     -1.985e-03  6.409e-03  -0.310 0.756707    
## stat156      1.223e-02  6.469e-03   1.890 0.058778 .  
## stat157      1.573e-03  6.416e-03   0.245 0.806359    
## stat158     -3.395e-03  6.534e-03  -0.520 0.603402    
## stat159     -7.670e-04  6.442e-03  -0.119 0.905229    
## stat160      7.535e-04  6.499e-03   0.116 0.907696    
## stat161      5.815e-03  6.455e-03   0.901 0.367714    
## stat162      9.981e-05  6.380e-03   0.016 0.987519    
## stat163      3.660e-03  6.502e-03   0.563 0.573490    
## stat164      9.777e-03  6.476e-03   1.510 0.131148    
## stat165      4.507e-03  6.403e-03   0.704 0.481538    
## stat166     -3.907e-03  6.330e-03  -0.617 0.537096    
## stat167     -9.456e-03  6.432e-03  -1.470 0.141580    
## stat168     -5.324e-03  6.459e-03  -0.824 0.409773    
## stat169     -2.612e-04  6.423e-03  -0.041 0.967565    
## stat170     -4.085e-03  6.426e-03  -0.636 0.524981    
## stat171      5.913e-03  6.514e-03   0.908 0.364089    
## stat172      6.988e-03  6.416e-03   1.089 0.276128    
## stat173     -7.858e-03  6.454e-03  -1.218 0.223441    
## stat174     -5.624e-03  6.429e-03  -0.875 0.381777    
## stat175     -5.600e-03  6.452e-03  -0.868 0.385519    
## stat176      1.599e-03  6.439e-03   0.248 0.803927    
## stat177     -4.052e-03  6.428e-03  -0.630 0.528515    
## stat178      2.329e-03  6.499e-03   0.358 0.720106    
## stat179     -2.716e-03  6.424e-03  -0.423 0.672422    
## stat180     -7.453e-03  6.397e-03  -1.165 0.244080    
## stat181      3.352e-03  6.467e-03   0.518 0.604250    
## stat182      6.080e-03  6.480e-03   0.938 0.348185    
## stat183      5.721e-03  6.440e-03   0.888 0.374423    
## stat184      7.671e-03  6.491e-03   1.182 0.237341    
## stat185     -3.219e-03  6.374e-03  -0.505 0.613561    
## stat186     -7.090e-03  6.453e-03  -1.099 0.271981    
## stat187     -1.418e-02  6.383e-03  -2.222 0.026314 *  
## stat188     -8.074e-04  6.409e-03  -0.126 0.899750    
## stat189      2.665e-03  6.420e-03   0.415 0.678097    
## stat190      4.180e-03  6.373e-03   0.656 0.511936    
## stat191     -1.135e-02  6.465e-03  -1.755 0.079312 .  
## stat192     -6.346e-04  6.493e-03  -0.098 0.922139    
## stat193      5.267e-03  6.523e-03   0.807 0.419413    
## stat194     -2.616e-03  6.383e-03  -0.410 0.681971    
## stat195      5.504e-03  6.412e-03   0.858 0.390665    
## stat196     -1.342e-03  6.511e-03  -0.206 0.836706    
## stat197     -1.765e-03  6.400e-03  -0.276 0.782699    
## stat198     -1.019e-02  6.445e-03  -1.581 0.113852    
## stat199      1.007e-02  6.372e-03   1.580 0.114059    
## stat200     -5.205e-03  6.394e-03  -0.814 0.415666    
## stat201      4.307e-03  6.430e-03   0.670 0.503007    
## stat202     -4.267e-03  6.512e-03  -0.655 0.512326    
## stat203      4.532e-04  6.404e-03   0.071 0.943588    
## stat204     -1.218e-02  6.415e-03  -1.898 0.057703 .  
## stat205     -8.533e-03  6.408e-03  -1.332 0.183036    
## stat206     -7.150e-03  6.484e-03  -1.103 0.270213    
## stat207      1.090e-02  6.468e-03   1.686 0.091935 .  
## stat208      2.912e-03  6.438e-03   0.452 0.651031    
## stat209     -2.892e-03  6.401e-03  -0.452 0.651478    
## stat210     -2.436e-03  6.442e-03  -0.378 0.705316    
## stat211     -6.395e-04  6.436e-03  -0.099 0.920853    
## stat212     -3.340e-04  6.420e-03  -0.052 0.958507    
## stat213     -2.571e-03  6.471e-03  -0.397 0.691137    
## stat214     -1.014e-02  6.408e-03  -1.583 0.113459    
## stat215     -1.026e-02  6.437e-03  -1.594 0.110898    
## stat216     -4.468e-03  6.451e-03  -0.693 0.488536    
## stat217      7.796e-03  6.435e-03   1.211 0.225768    
## sqrt.x18     8.033e-01  2.464e-02  32.608  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.8462 on 5761 degrees of freedom
## Multiple R-squared:  0.311,  Adjusted R-squared:  0.2823 
## F-statistic: 10.83 on 240 and 5761 DF,  p-value: < 2.2e-16
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 309"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking with removal of high influence points

# Observations whose Cook's D exceeds the common 4/n influence cutoff
high.cd = names(cd.full[cd.full > 4 / nrow(data.train)])

# Drop the flagged rows and refit the full model on the filtered data
keep.rows = !(rownames(data.train) %in% high.cd)
data.train2 = data.train[keep.rows, ]
model.full2 = lm(formula, data = data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.80383 -0.50441 -0.04955  0.50249  1.84196 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -4.003e+00  2.165e-01 -18.486  < 2e-16 ***
## x1          -2.074e-02  1.489e-02  -1.392 0.163862    
## x2           7.454e-03  9.496e-03   0.785 0.432513    
## x3           7.589e-05  2.581e-03   0.029 0.976541    
## x4          -1.402e-03  2.037e-04  -6.882 6.57e-12 ***
## x5           1.239e-02  6.692e-03   1.851 0.064221 .  
## x6          -8.395e-03  1.352e-02  -0.621 0.534687    
## x7           3.586e-01  1.452e-02  24.697  < 2e-16 ***
## x8           1.128e-02  3.366e-03   3.351 0.000812 ***
## x9           1.005e-01  7.504e-03  13.387  < 2e-16 ***
## x10          4.573e-02  6.990e-03   6.543 6.58e-11 ***
## x11          5.270e+06  1.678e+06   3.140 0.001700 ** 
## x12         -2.917e-03  4.240e-03  -0.688 0.491525    
## x13          5.543e-03  1.694e-03   3.272 0.001075 ** 
## x14         -1.038e-02  7.325e-03  -1.418 0.156350    
## x15          3.775e-03  6.997e-03   0.540 0.589524    
## x16          2.442e-02  4.840e-03   5.045 4.67e-07 ***
## x17          3.346e-02  7.405e-03   4.518 6.36e-06 ***
## x19          7.000e-03  3.739e-03   1.872 0.061253 .  
## x20         -1.224e-02  2.610e-02  -0.469 0.639076    
## x21          4.915e-03  9.596e-04   5.122 3.13e-07 ***
## x22         -1.914e-02  7.787e-03  -2.457 0.014031 *  
## x23          8.417e-04  7.439e-03   0.113 0.909916    
## stat1       -3.853e-03  5.610e-03  -0.687 0.492242    
## stat2        8.196e-03  5.560e-03   1.474 0.140503    
## stat3        1.130e-02  5.648e-03   2.001 0.045477 *  
## stat4       -2.013e-02  5.639e-03  -3.570 0.000360 ***
## stat5       -8.208e-03  5.657e-03  -1.451 0.146849    
## stat6       -9.100e-03  5.619e-03  -1.619 0.105418    
## stat7        3.241e-03  5.618e-03   0.577 0.563996    
## stat8       -5.448e-03  5.643e-03  -0.965 0.334410    
## stat9        2.790e-04  5.613e-03   0.050 0.960361    
## stat10      -6.538e-03  5.594e-03  -1.169 0.242538    
## stat11      -1.284e-02  5.673e-03  -2.264 0.023627 *  
## stat12       5.048e-03  5.595e-03   0.902 0.366892    
## stat13      -7.928e-03  5.623e-03  -1.410 0.158614    
## stat14      -3.069e-02  5.585e-03  -5.495 4.09e-08 ***
## stat15      -1.508e-02  5.586e-03  -2.699 0.006967 ** 
## stat16      -2.924e-05  5.599e-03  -0.005 0.995833    
## stat17      -4.747e-03  5.565e-03  -0.853 0.393659    
## stat18      -3.256e-03  5.555e-03  -0.586 0.557818    
## stat19       4.893e-03  5.609e-03   0.872 0.383055    
## stat20      -4.002e-03  5.637e-03  -0.710 0.477755    
## stat21       9.920e-04  5.651e-03   0.176 0.860648    
## stat22      -5.107e-03  5.641e-03  -0.905 0.365318    
## stat23       1.521e-02  5.619e-03   2.708 0.006796 ** 
## stat24      -1.284e-02  5.634e-03  -2.279 0.022726 *  
## stat25      -1.015e-02  5.628e-03  -1.803 0.071448 .  
## stat26      -8.477e-03  5.631e-03  -1.505 0.132280    
## stat27       2.988e-03  5.621e-03   0.531 0.595098    
## stat28       2.213e-03  5.631e-03   0.393 0.694366    
## stat29       5.510e-03  5.640e-03   0.977 0.328603    
## stat30       3.500e-03  5.679e-03   0.616 0.537796    
## stat31      -1.977e-03  5.650e-03  -0.350 0.726425    
## stat32       5.982e-03  5.700e-03   1.049 0.294030    
## stat33      -1.248e-02  5.627e-03  -2.218 0.026594 *  
## stat34       7.269e-03  5.645e-03   1.288 0.197923    
## stat35      -1.085e-02  5.605e-03  -1.936 0.052932 .  
## stat36       6.848e-03  5.569e-03   1.230 0.218820    
## stat37      -1.293e-02  5.686e-03  -2.274 0.022997 *  
## stat38       1.099e-02  5.608e-03   1.959 0.050138 .  
## stat39      -7.185e-03  5.565e-03  -1.291 0.196698    
## stat40       2.234e-03  5.627e-03   0.397 0.691357    
## stat41      -1.747e-02  5.579e-03  -3.131 0.001752 ** 
## stat42      -6.770e-03  5.622e-03  -1.204 0.228515    
## stat43      -1.183e-02  5.635e-03  -2.100 0.035791 *  
## stat44       7.781e-03  5.584e-03   1.394 0.163514    
## stat45      -5.918e-03  5.619e-03  -1.053 0.292312    
## stat46       9.009e-03  5.633e-03   1.599 0.109803    
## stat47       6.639e-03  5.671e-03   1.171 0.241805    
## stat48       1.200e-03  5.612e-03   0.214 0.830734    
## stat49       2.438e-03  5.579e-03   0.437 0.662177    
## stat50       1.406e-03  5.588e-03   0.252 0.801411    
## stat51       4.526e-03  5.608e-03   0.807 0.419687    
## stat52      -2.646e-03  5.629e-03  -0.470 0.638256    
## stat53      -1.789e-03  5.663e-03  -0.316 0.752111    
## stat54      -9.090e-03  5.635e-03  -1.613 0.106803    
## stat55       5.982e-03  5.554e-03   1.077 0.281518    
## stat56       4.310e-04  5.639e-03   0.076 0.939078    
## stat57       3.015e-03  5.576e-03   0.541 0.588775    
## stat58       1.283e-04  5.575e-03   0.023 0.981636    
## stat59      -5.323e-04  5.610e-03  -0.095 0.924399    
## stat60       1.631e-02  5.627e-03   2.898 0.003769 ** 
## stat61      -1.952e-03  5.652e-03  -0.345 0.729911    
## stat62      -9.415e-03  5.617e-03  -1.676 0.093759 .  
## stat63       2.096e-03  5.612e-03   0.374 0.708787    
## stat64       4.352e-03  5.571e-03   0.781 0.434701    
## stat65       1.373e-03  5.630e-03   0.244 0.807344    
## stat66       6.133e-03  5.685e-03   1.079 0.280719    
## stat67       3.300e-03  5.653e-03   0.584 0.559333    
## stat68      -2.003e-03  5.637e-03  -0.355 0.722404    
## stat69      -1.223e-03  5.635e-03  -0.217 0.828139    
## stat70       2.681e-03  5.603e-03   0.479 0.632267    
## stat71       4.478e-03  5.602e-03   0.799 0.424170    
## stat72      -5.647e-03  5.613e-03  -1.006 0.314444    
## stat73       6.891e-03  5.638e-03   1.222 0.221680    
## stat74      -2.848e-03  5.658e-03  -0.503 0.614699    
## stat75       3.292e-03  5.655e-03   0.582 0.560436    
## stat76       9.730e-03  5.600e-03   1.737 0.082356 .  
## stat77       9.277e-04  5.638e-03   0.165 0.869314    
## stat78      -7.300e-03  5.645e-03  -1.293 0.195959    
## stat79       1.791e-03  5.634e-03   0.318 0.750553    
## stat80       1.115e-02  5.642e-03   1.976 0.048201 *  
## stat81       6.054e-03  5.637e-03   1.074 0.282914    
## stat82       1.846e-03  5.576e-03   0.331 0.740645    
## stat83      -1.998e-03  5.615e-03  -0.356 0.721978    
## stat84      -8.435e-03  5.639e-03  -1.496 0.134769    
## stat85      -9.330e-03  5.651e-03  -1.651 0.098817 .  
## stat86       5.128e-03  5.654e-03   0.907 0.364438    
## stat87      -1.003e-02  5.629e-03  -1.782 0.074801 .  
## stat88      -4.986e-03  5.585e-03  -0.893 0.372062    
## stat89      -1.785e-03  5.601e-03  -0.319 0.750022    
## stat90      -8.185e-03  5.631e-03  -1.454 0.146118    
## stat91      -1.080e-02  5.542e-03  -1.949 0.051367 .  
## stat92      -1.243e-02  5.639e-03  -2.204 0.027584 *  
## stat93      -1.082e-03  5.669e-03  -0.191 0.848710    
## stat94      -6.814e-03  5.628e-03  -1.211 0.226031    
## stat95       2.179e-03  5.653e-03   0.385 0.699942    
## stat96      -6.876e-03  5.583e-03  -1.232 0.218182    
## stat97       1.612e-03  5.559e-03   0.290 0.771810    
## stat98       1.076e-01  5.523e-03  19.480  < 2e-16 ***
## stat99       7.657e-03  5.688e-03   1.346 0.178317    
## stat100      1.745e-02  5.622e-03   3.104 0.001916 ** 
## stat101      2.793e-04  5.648e-03   0.049 0.960555    
## stat102      1.998e-03  5.620e-03   0.355 0.722266    
## stat103     -1.086e-02  5.677e-03  -1.913 0.055826 .  
## stat104     -5.808e-03  5.605e-03  -1.036 0.300165    
## stat105      8.630e-03  5.569e-03   1.549 0.121323    
## stat106     -6.706e-03  5.564e-03  -1.205 0.228188    
## stat107      1.987e-03  5.622e-03   0.353 0.723783    
## stat108     -7.122e-03  5.631e-03  -1.265 0.205989    
## stat109      2.914e-03  5.604e-03   0.520 0.603059    
## stat110     -1.019e-01  5.582e-03 -18.251  < 2e-16 ***
## stat111     -5.994e-03  5.630e-03  -1.065 0.287142    
## stat112      4.543e-04  5.656e-03   0.080 0.935986    
## stat113      1.047e-03  5.661e-03   0.185 0.853230    
## stat114      6.157e-03  5.598e-03   1.100 0.271490    
## stat115      1.112e-02  5.613e-03   1.981 0.047668 *  
## stat116      1.177e-02  5.643e-03   2.086 0.037025 *  
## stat117      4.778e-03  5.647e-03   0.846 0.397480    
## stat118     -5.691e-03  5.578e-03  -1.020 0.307600    
## stat119      1.202e-02  5.613e-03   2.142 0.032217 *  
## stat120     -2.687e-03  5.560e-03  -0.483 0.628915    
## stat121     -1.194e-02  5.649e-03  -2.113 0.034659 *  
## stat122     -1.137e-02  5.603e-03  -2.029 0.042477 *  
## stat123      9.707e-03  5.689e-03   1.706 0.088027 .  
## stat124     -1.144e-02  5.628e-03  -2.033 0.042143 *  
## stat125      5.262e-03  5.639e-03   0.933 0.350814    
## stat126      1.454e-02  5.572e-03   2.610 0.009089 ** 
## stat127      2.341e-03  5.612e-03   0.417 0.676615    
## stat128     -1.099e-02  5.603e-03  -1.961 0.049971 *  
## stat129     -8.604e-03  5.610e-03  -1.534 0.125188    
## stat130      2.436e-03  5.657e-03   0.431 0.666753    
## stat131      4.304e-03  5.620e-03   0.766 0.443790    
## stat132     -8.439e-03  5.589e-03  -1.510 0.131130    
## stat133     -1.420e-03  5.638e-03  -0.252 0.801227    
## stat134     -6.782e-03  5.607e-03  -1.210 0.226469    
## stat135      3.862e-04  5.640e-03   0.068 0.945409    
## stat136     -3.635e-03  5.653e-03  -0.643 0.520213    
## stat137      8.668e-03  5.592e-03   1.550 0.121175    
## stat138      2.497e-03  5.625e-03   0.444 0.657161    
## stat139      8.083e-03  5.632e-03   1.435 0.151291    
## stat140      2.031e-03  5.566e-03   0.365 0.715170    
## stat141      1.033e-02  5.566e-03   1.856 0.063477 .  
## stat142     -3.677e-04  5.689e-03  -0.065 0.948469    
## stat143      2.071e-03  5.654e-03   0.366 0.714207    
## stat144      1.591e-02  5.630e-03   2.825 0.004740 ** 
## stat145      5.627e-04  5.663e-03   0.099 0.920853    
## stat146     -1.275e-02  5.618e-03  -2.269 0.023330 *  
## stat147     -1.082e-02  5.688e-03  -1.902 0.057215 .  
## stat148     -8.708e-03  5.580e-03  -1.561 0.118643    
## stat149     -1.329e-02  5.640e-03  -2.356 0.018511 *  
## stat150     -1.085e-03  5.669e-03  -0.191 0.848275    
## stat151      4.775e-04  5.703e-03   0.084 0.933276    
## stat152     -4.765e-03  5.600e-03  -0.851 0.394790    
## stat153      4.585e-03  5.681e-03   0.807 0.419613    
## stat154      3.998e-03  5.706e-03   0.701 0.483523    
## stat155      3.011e-03  5.608e-03   0.537 0.591346    
## stat156      1.558e-02  5.642e-03   2.761 0.005778 ** 
## stat157     -1.267e-03  5.601e-03  -0.226 0.820985    
## stat158      2.363e-03  5.714e-03   0.414 0.679191    
## stat159      3.291e-04  5.618e-03   0.059 0.953292    
## stat160      3.145e-03  5.678e-03   0.554 0.579673    
## stat161      6.998e-03  5.642e-03   1.240 0.214871    
## stat162     -1.415e-04  5.554e-03  -0.025 0.979682    
## stat163      8.284e-03  5.698e-03   1.454 0.146039    
## stat164      6.614e-03  5.664e-03   1.168 0.242966    
## stat165      4.518e-03  5.590e-03   0.808 0.418999    
## stat166     -4.153e-03  5.520e-03  -0.752 0.451917    
## stat167     -1.633e-02  5.623e-03  -2.904 0.003694 ** 
## stat168     -3.490e-03  5.640e-03  -0.619 0.536003    
## stat169      2.155e-03  5.627e-03   0.383 0.701763    
## stat170     -5.804e-03  5.615e-03  -1.034 0.301355    
## stat171      2.541e-03  5.704e-03   0.446 0.655955    
## stat172      1.102e-02  5.597e-03   1.969 0.049037 *  
## stat173     -1.873e-03  5.645e-03  -0.332 0.740024    
## stat174     -1.804e-03  5.616e-03  -0.321 0.748079    
## stat175     -6.700e-03  5.626e-03  -1.191 0.233708    
## stat176     -4.911e-03  5.630e-03  -0.872 0.383113    
## stat177     -1.645e-02  5.607e-03  -2.934 0.003362 ** 
## stat178      4.062e-03  5.671e-03   0.716 0.473818    
## stat179     -5.629e-03  5.615e-03  -1.003 0.316128    
## stat180     -3.202e-03  5.602e-03  -0.572 0.567603    
## stat181      7.103e-03  5.643e-03   1.259 0.208152    
## stat182      1.079e-02  5.670e-03   1.904 0.056988 .  
## stat183      4.880e-03  5.623e-03   0.868 0.385464    
## stat184      9.128e-03  5.663e-03   1.612 0.107029    
## stat185      6.862e-04  5.579e-03   0.123 0.902115    
## stat186     -7.738e-06  5.632e-03  -0.001 0.998904    
## stat187     -8.507e-03  5.576e-03  -1.526 0.127175    
## stat188     -8.726e-04  5.601e-03  -0.156 0.876201    
## stat189     -1.784e-03  5.610e-03  -0.318 0.750444    
## stat190      3.340e-03  5.577e-03   0.599 0.549319    
## stat191     -1.282e-02  5.639e-03  -2.273 0.023057 *  
## stat192     -9.478e-04  5.664e-03  -0.167 0.867104    
## stat193      1.274e-02  5.708e-03   2.231 0.025696 *  
## stat194     -4.000e-03  5.598e-03  -0.715 0.474948    
## stat195      3.797e-03  5.615e-03   0.676 0.498879    
## stat196     -6.950e-03  5.683e-03  -1.223 0.221372    
## stat197     -2.530e-03  5.599e-03  -0.452 0.651454    
## stat198     -9.806e-03  5.626e-03  -1.743 0.081410 .  
## stat199      5.166e-03  5.567e-03   0.928 0.353450    
## stat200     -1.407e-03  5.606e-03  -0.251 0.801797    
## stat201      7.135e-03  5.623e-03   1.269 0.204561    
## stat202     -3.784e-03  5.693e-03  -0.665 0.506323    
## stat203      5.186e-03  5.598e-03   0.926 0.354240    
## stat204     -6.607e-03  5.605e-03  -1.179 0.238613    
## stat205     -1.837e-03  5.598e-03  -0.328 0.742756    
## stat206     -9.401e-03  5.657e-03  -1.662 0.096602 .  
## stat207      1.318e-02  5.651e-03   2.333 0.019678 *  
## stat208      3.992e-03  5.640e-03   0.708 0.479038    
## stat209      4.729e-05  5.585e-03   0.008 0.993244    
## stat210     -8.971e-03  5.622e-03  -1.596 0.110603    
## stat211     -1.713e-03  5.628e-03  -0.304 0.760867    
## stat212      1.782e-03  5.616e-03   0.317 0.750952    
## stat213     -1.030e-03  5.642e-03  -0.182 0.855230    
## stat214     -5.655e-03  5.605e-03  -1.009 0.313104    
## stat215     -9.394e-03  5.626e-03  -1.670 0.095058 .  
## stat216     -3.165e-03  5.624e-03  -0.563 0.573668    
## stat217      1.652e-03  5.617e-03   0.294 0.768693    
## sqrt.x18     8.385e-01  2.153e-02  38.945  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.719 on 5452 degrees of freedom
## Multiple R-squared:   0.41,  Adjusted R-squared:  0.384 
## F-statistic: 15.79 on 240 and 5452 DF,  p-value: < 2.2e-16
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 246"
## [1] "Number of data points that have Cook's D > 1: 0"
# Residuals are much more normal than before.
# Check whether the high-influence and normal groups differ in distribution
# and, if so, in which variables.
# High Leverage Plot: target variable only, split by influence status.
plotData = data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, target = one_of(label.names))

ggplot(data = plotData, aes(x = type, y = target)) +
  geom_boxplot(fill = 'light blue', outlier.shape = NA) +
  scale_y_continuous(name = "Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Rebuild the High/Normal split, this time keeping all features
plotData = data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, one_of(feature.names))

# Two-sample (pooled-variance) t-tests: for each feature, do the means
# differ between the high-influence and normal observations?
comp.test = lapply(
  dplyr::select(plotData, one_of(feature.names)),
  function(x) t.test(x ~ plotData$type, var.equal = TRUE)
)

# Keep only the features significant at the 5% level and list their p-values.
# vapply is type-stable (always a named numeric vector), unlike sapply,
# whose return type would silently change on an empty result.
sig.comp = list.filter(comp.test, p.value < 0.05)
vapply(sig.comp, function(x) x[['p.value']], numeric(1))
##           x4           x7       stat26       stat38       stat47 
## 1.236797e-02 3.013279e-02 1.928432e-02 5.933891e-03 2.745352e-02 
##       stat74       stat93       stat98      stat110      stat144 
## 4.948249e-02 5.471169e-03 8.687036e-11 1.316119e-07 1.474569e-02 
##      stat146      stat170      stat174      stat200 
## 3.735914e-02 2.366058e-02 3.427287e-02 1.623301e-02
# Distribution (box) plots: one faceted panel per feature, High vs Normal
mm = melt(plotData, id = c('type'))

ggplot(mm) +
  geom_boxplot(aes(x = type, y = value)) +
  facet_wrap(~variable, ncol = 10, scales = 'free') +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# The facet grid is very tall, so allow an oversized output image
ggsave('comparison.jpeg', width = 50, height = 400, units = 'cm', limitsize = FALSE)

Grand Means Model

# Intercept-only baselines on the full and the influence-filtered training
# sets; these serve as the lower scope for forward/stepwise selection.
model.null = lm(grand.mean.formula, data = data.train)
model.null2 = lm(grand.mean.formula, data = data.train2)

Variable Selection

Basic reference: http://www.stat.columbia.edu/~martin/W2024/R10.pdf. Cross-validation and other metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward stepwise selection on the full training set: start from the
# intercept-only model and add terms up to model.full's scope by AIC.
if (algo.forward) {
  t1 = Sys.time()

  # trace = 0 suppresses the per-step AIC log
  model.forward = step(model.null, scope = list(lower = model.null, upper = model.full),
                       direction = "forward", trace = 0)
  print(summary(model.forward))

  t2 = Sys.time()
  # BUG FIX: paste(t2 - t1) dropped the difftime units attribute, printing a
  # bare number that could mean seconds or minutes; format() keeps the units.
  print(paste0("Time taken for Forward Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.forward, data.train)
}

Test

# Score the forward-selection model (full training set) on the test set.
if (algo.forward) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward stepwise selection on the filtered training set (data.train2),
# mirroring the full-train run above but with its own null/full scope.
if (algo.forward) {
  t1 = Sys.time()

  # trace = 0 suppresses the per-step AIC log
  model.forward2 = step(model.null2, scope = list(lower = model.null2, upper = model.full2),
                        direction = "forward", trace = 0)
  print(summary(model.forward2))

  t2 = Sys.time()
  # BUG FIX: paste(t2 - t1) dropped the difftime units attribute, printing a
  # bare number that could mean seconds or minutes; format() keeps the units.
  print(paste0("Time taken for Forward Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.forward2, data.train2)
}

Test

# Score the filtered-train forward-selection model on the test set.
if (algo.forward) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Cross-validated forward selection via caret's leapForward on the full
# training set; set.seed makes the CV fold assignment reproducible.
if (algo.forward.caret) {
  set.seed(1)
  cv.fit = train.caret.glmselect(formula = formula,
                                 data = data.train,
                                 method = "leapForward",
                                 feature.names = feature.names)
  model.forward = cv.fit$model
  id = cv.fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 11 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD       MAESD
## 1       1 0.9290962 0.1383305 0.7452032 0.02166207 0.04198587 0.014135754
## 2       2 0.9013733 0.1870866 0.7246099 0.01861952 0.03823400 0.010184999
## 3       3 0.8828368 0.2193324 0.7063992 0.01966356 0.03628172 0.013394655
## 4       4 0.8674643 0.2460619 0.6887616 0.01704415 0.03510583 0.008406016
## 5       5 0.8579880 0.2627745 0.6814284 0.02179327 0.04052670 0.012261767
## 6       6 0.8569684 0.2645498 0.6802092 0.02203645 0.04140563 0.012736876
## 7       7 0.8563457 0.2657719 0.6808046 0.02394403 0.04272918 0.013719134
## 8       8 0.8534905 0.2706668 0.6783829 0.02287015 0.04290888 0.012749598
## 9       9 0.8529722 0.2715124 0.6777699 0.02320165 0.04311993 0.013391833
## 10     10 0.8527848 0.2718440 0.6780023 0.02361486 0.04282652 0.013573185
## 11     11 0.8508870 0.2750101 0.6760872 0.02343404 0.04253772 0.013138789
## 12     12 0.8511750 0.2745439 0.6765241 0.02288446 0.04185114 0.012393757
## 13     13 0.8517134 0.2736653 0.6773621 0.02284295 0.04132396 0.012437282
## 14     14 0.8521985 0.2728552 0.6776070 0.02318661 0.04172786 0.012554160
## 15     15 0.8523841 0.2725331 0.6784420 0.02315885 0.04149192 0.012613788
## 16     16 0.8525513 0.2722630 0.6784920 0.02260269 0.04071101 0.012576496
## 17     17 0.8529977 0.2716226 0.6786935 0.02270362 0.04171689 0.012658149
## 18     18 0.8542352 0.2695788 0.6798453 0.02300600 0.04216666 0.013038508
## 19     19 0.8540150 0.2699300 0.6797931 0.02318615 0.04224529 0.013164103
## 20     20 0.8543074 0.2694820 0.6799731 0.02327857 0.04326866 0.013380958
## 21     21 0.8541718 0.2697847 0.6799596 0.02338701 0.04281288 0.012972946
## 22     22 0.8538485 0.2703269 0.6801116 0.02325171 0.04246082 0.013013846
## 23     23 0.8540525 0.2699838 0.6804112 0.02348951 0.04322998 0.013439718
## 24     24 0.8542235 0.2696554 0.6802333 0.02293015 0.04221332 0.012719398
## 25     25 0.8540148 0.2699457 0.6796657 0.02290768 0.04237085 0.012463534
## 26     26 0.8542857 0.2694675 0.6797957 0.02275447 0.04175079 0.012054184
## 27     27 0.8547728 0.2686518 0.6800886 0.02272751 0.04164389 0.012394635
## 28     28 0.8549054 0.2684144 0.6803033 0.02199797 0.04096413 0.011766341
## 29     29 0.8548186 0.2685977 0.6801696 0.02197216 0.04009868 0.011406251
## 30     30 0.8549068 0.2684762 0.6803886 0.02199649 0.03958926 0.011365407
## 31     31 0.8546658 0.2688977 0.6800989 0.02180320 0.03932277 0.011210652
## 32     32 0.8547160 0.2688080 0.6799741 0.02185699 0.03922505 0.010993179
## 33     33 0.8548176 0.2686519 0.6797295 0.02197067 0.03871603 0.011338790
## 34     34 0.8547794 0.2687560 0.6796216 0.02221011 0.03927036 0.011415235
## 35     35 0.8548506 0.2686375 0.6799361 0.02181091 0.03894750 0.011317318
## 36     36 0.8554041 0.2677701 0.6801746 0.02192686 0.03879174 0.011130982
## 37     37 0.8555087 0.2676602 0.6804876 0.02233199 0.03924478 0.011923968
## 38     38 0.8559311 0.2669784 0.6808720 0.02206198 0.03899271 0.011453150
## 39     39 0.8558155 0.2671533 0.6807627 0.02187636 0.03848619 0.011281950
## 40     40 0.8559894 0.2668621 0.6808054 0.02184685 0.03840533 0.011168598
## 41     41 0.8563824 0.2662562 0.6810874 0.02174172 0.03807427 0.010831263
## 42     42 0.8566350 0.2657774 0.6811238 0.02104020 0.03711238 0.010395133
## 43     43 0.8566226 0.2657972 0.6810760 0.02149397 0.03751134 0.010860892
## 44     44 0.8565980 0.2658459 0.6809241 0.02118643 0.03748940 0.010770790
## 45     45 0.8569760 0.2652600 0.6811146 0.02124494 0.03706930 0.010920502
## 46     46 0.8573623 0.2647052 0.6814429 0.02146800 0.03753365 0.011036650
## 47     47 0.8570724 0.2652069 0.6811420 0.02165282 0.03761367 0.011110274
## 48     48 0.8577161 0.2641628 0.6816867 0.02194901 0.03794519 0.011219820
## 49     49 0.8578362 0.2639640 0.6818130 0.02203398 0.03824144 0.011476496
## 50     50 0.8584853 0.2629305 0.6821517 0.02220355 0.03853874 0.011672943
## 51     51 0.8587093 0.2625723 0.6819704 0.02229359 0.03863962 0.011769751
## 52     52 0.8591956 0.2618083 0.6823571 0.02263500 0.03878790 0.011707581
## 53     53 0.8593379 0.2615810 0.6824007 0.02255288 0.03907664 0.011560892
## 54     54 0.8596900 0.2610742 0.6826549 0.02274893 0.03974641 0.011842914
## 55     55 0.8598219 0.2608948 0.6828240 0.02284957 0.03987481 0.011731767
## 56     56 0.8599794 0.2606597 0.6830468 0.02349799 0.04044514 0.012164682
## 57     57 0.8600886 0.2605014 0.6831561 0.02338335 0.04037923 0.012193899
## 58     58 0.8602529 0.2602251 0.6833835 0.02312299 0.03995030 0.011948017
## 59     59 0.8604175 0.2599717 0.6834951 0.02318162 0.04001172 0.011912458
## 60     60 0.8607766 0.2594320 0.6838963 0.02333166 0.03991449 0.011676456
## 61     61 0.8609864 0.2590818 0.6841748 0.02296370 0.03956210 0.011490966
## 62     62 0.8610687 0.2589812 0.6844362 0.02300207 0.03907630 0.011462493
## 63     63 0.8610314 0.2590310 0.6843547 0.02330899 0.03887315 0.011736920
## 64     64 0.8612663 0.2587358 0.6845447 0.02359907 0.03923559 0.012054913
## 65     65 0.8615031 0.2583652 0.6847471 0.02354704 0.03912432 0.012108481
## 66     66 0.8612308 0.2588001 0.6847008 0.02336067 0.03882306 0.011682746
## 67     67 0.8611993 0.2588833 0.6845935 0.02358232 0.03949473 0.011758230
## 68     68 0.8615160 0.2583688 0.6850614 0.02348977 0.03942843 0.011661822
## 69     69 0.8616260 0.2581953 0.6853684 0.02335020 0.03942954 0.011809502
## 70     70 0.8618400 0.2578550 0.6854117 0.02321225 0.03932591 0.011719962
## 71     71 0.8621513 0.2574261 0.6855382 0.02327991 0.03929257 0.011835079
## 72     72 0.8623577 0.2571308 0.6857160 0.02329707 0.03954206 0.011921347
## 73     73 0.8621682 0.2574697 0.6856769 0.02347616 0.03983259 0.012055988
## 74     74 0.8620750 0.2575968 0.6857381 0.02314256 0.03959652 0.011932931
## 75     75 0.8620065 0.2577401 0.6857659 0.02312727 0.03963903 0.011979461
## 76     76 0.8621628 0.2575154 0.6859593 0.02306146 0.03935705 0.011889673
## 77     77 0.8620860 0.2576435 0.6858563 0.02309965 0.03934620 0.011831114
## 78     78 0.8621693 0.2575390 0.6861204 0.02308133 0.03925204 0.011991342
## 79     79 0.8621404 0.2575706 0.6863324 0.02325144 0.03911904 0.012073742
## 80     80 0.8624706 0.2570990 0.6865466 0.02309762 0.03905190 0.012028117
## 81     81 0.8626862 0.2567576 0.6868265 0.02316362 0.03914279 0.012086522
## 82     82 0.8627285 0.2566763 0.6868049 0.02311977 0.03894001 0.012018600
## 83     83 0.8627722 0.2566312 0.6869627 0.02268904 0.03888316 0.011832450
## 84     84 0.8628117 0.2565505 0.6869915 0.02246128 0.03854674 0.011556406
## 85     85 0.8628648 0.2564948 0.6870910 0.02266817 0.03851121 0.012000307
## 86     86 0.8627977 0.2566012 0.6868223 0.02238742 0.03851067 0.011806893
## 87     87 0.8627776 0.2566741 0.6866793 0.02232002 0.03879629 0.011831915
## 88     88 0.8632343 0.2560114 0.6870997 0.02270213 0.03949521 0.012210782
## 89     89 0.8632723 0.2559746 0.6870332 0.02285965 0.03943081 0.012237016
## 90     90 0.8632977 0.2559541 0.6870014 0.02253992 0.03914520 0.012093311
## 91     91 0.8632101 0.2560845 0.6870284 0.02251315 0.03945611 0.012088822
## 92     92 0.8633732 0.2558384 0.6870681 0.02254282 0.03947916 0.012160370
## 93     93 0.8638109 0.2551524 0.6874736 0.02261511 0.03921647 0.012229534
## 94     94 0.8640371 0.2548289 0.6876399 0.02264787 0.03923498 0.012062999
## 95     95 0.8639810 0.2549298 0.6875175 0.02267471 0.03933478 0.012263835
## 96     96 0.8640199 0.2548857 0.6874447 0.02264547 0.03934333 0.011901472
## 97     97 0.8640623 0.2548393 0.6875210 0.02277264 0.03938409 0.012018802
## 98     98 0.8642027 0.2546242 0.6876506 0.02271281 0.03925834 0.011953740
## 99     99 0.8642223 0.2545938 0.6877688 0.02287490 0.03930092 0.012128978
## 100   100 0.8643953 0.2543239 0.6879567 0.02283680 0.03910614 0.012122823
## 101   101 0.8642898 0.2545060 0.6876635 0.02294560 0.03929611 0.012273775
## 102   102 0.8643984 0.2543533 0.6878161 0.02317186 0.03945902 0.012556138
## 103   103 0.8644414 0.2542932 0.6878291 0.02317741 0.03992543 0.012637911
## 104   104 0.8644312 0.2543128 0.6878634 0.02311180 0.04024388 0.012656844
## 105   105 0.8645867 0.2541142 0.6877943 0.02305129 0.04021094 0.012529866
## 106   106 0.8645211 0.2542035 0.6877694 0.02284128 0.04017092 0.012388908
## 107   107 0.8644295 0.2543775 0.6876091 0.02302157 0.04058668 0.012705034
## 108   108 0.8642918 0.2545693 0.6874994 0.02290152 0.04049468 0.012778650
## 109   109 0.8642588 0.2546218 0.6874266 0.02281878 0.04008472 0.012662462
## 110   110 0.8641358 0.2548169 0.6872850 0.02278611 0.03985671 0.012641131
## 111   111 0.8640490 0.2549749 0.6871702 0.02288256 0.04005688 0.012874742
## 112   112 0.8640439 0.2549893 0.6870929 0.02292379 0.04027497 0.012895991
## 113   113 0.8638233 0.2553571 0.6868187 0.02298608 0.04005399 0.012844863
## 114   114 0.8638847 0.2552265 0.6868735 0.02283827 0.03995297 0.012623877
## 115   115 0.8640994 0.2549156 0.6870200 0.02265917 0.03990577 0.012567007
## 116   116 0.8643432 0.2545373 0.6870965 0.02275910 0.03988876 0.012499155
## 117   117 0.8644913 0.2543263 0.6871455 0.02290318 0.03994050 0.012747046
## 118   118 0.8646037 0.2541825 0.6872131 0.02299817 0.04021151 0.012861606
## 119   119 0.8647883 0.2539053 0.6873866 0.02301958 0.04025596 0.012959759
## 120   120 0.8648420 0.2538341 0.6875101 0.02288677 0.04005430 0.012809239
## 121   121 0.8650588 0.2534951 0.6877688 0.02315420 0.04027291 0.013003466
## 122   122 0.8650664 0.2534893 0.6879085 0.02329925 0.04015560 0.013005883
## 123   123 0.8650870 0.2534768 0.6878294 0.02317109 0.04004659 0.012849221
## 124   124 0.8651706 0.2533624 0.6878983 0.02321733 0.04012785 0.012887384
## 125   125 0.8650079 0.2536246 0.6878948 0.02312456 0.04014860 0.012881596
## 126   126 0.8648326 0.2539158 0.6878239 0.02314649 0.04007277 0.012846072
## 127   127 0.8649503 0.2537337 0.6879778 0.02307285 0.04013759 0.012948779
## 128   128 0.8648631 0.2538768 0.6879572 0.02281031 0.03999860 0.012862436
## 129   129 0.8649251 0.2537929 0.6880771 0.02265131 0.03985246 0.012829841
## 130   130 0.8648339 0.2539362 0.6880271 0.02277664 0.03981192 0.012743900
## 131   131 0.8648512 0.2539416 0.6879300 0.02276978 0.03984771 0.012616150
## 132   132 0.8648048 0.2540291 0.6878424 0.02285107 0.03997839 0.012855128
## 133   133 0.8648641 0.2539349 0.6878735 0.02278441 0.03965723 0.012807189
## 134   134 0.8648208 0.2540066 0.6877914 0.02291047 0.03970202 0.013005217
## 135   135 0.8650188 0.2537317 0.6879071 0.02308153 0.03996177 0.013173714
## 136   136 0.8650103 0.2537216 0.6879409 0.02297190 0.03993806 0.013203914
## 137   137 0.8651462 0.2535217 0.6880732 0.02289293 0.03983206 0.013104625
## 138   138 0.8650313 0.2537122 0.6880403 0.02277634 0.04009320 0.013010420
## 139   139 0.8649779 0.2537800 0.6880651 0.02279724 0.04017715 0.013074404
## 140   140 0.8649062 0.2538843 0.6880241 0.02275770 0.04016311 0.013102834
## 141   141 0.8650169 0.2537294 0.6881171 0.02278117 0.04005702 0.013211692
## 142   142 0.8650306 0.2537039 0.6880975 0.02278478 0.04009348 0.013272763
## 143   143 0.8649787 0.2537859 0.6880701 0.02269098 0.04010051 0.013228295
## 144   144 0.8649252 0.2538728 0.6879425 0.02262159 0.03977474 0.013045760
## 145   145 0.8649771 0.2537971 0.6879181 0.02269770 0.03962796 0.013052834
## 146   146 0.8650057 0.2537733 0.6879637 0.02260088 0.03964417 0.013011851
## 147   147 0.8650938 0.2536399 0.6879479 0.02266791 0.03975614 0.013160245
## 148   148 0.8650924 0.2536764 0.6880071 0.02282070 0.03982985 0.013341775
## 149   149 0.8650850 0.2537045 0.6880012 0.02283697 0.03991278 0.013431125
## 150   150 0.8650554 0.2537385 0.6880047 0.02284209 0.03983081 0.013447539
## 151   151 0.8651231 0.2536358 0.6880233 0.02275608 0.03959060 0.013296807
## 152   152 0.8651102 0.2536548 0.6880271 0.02274299 0.03944983 0.013372629
## 153   153 0.8650833 0.2537173 0.6880155 0.02269657 0.03955622 0.013403304
## 154   154 0.8649964 0.2538537 0.6880612 0.02260950 0.03944471 0.013421036
## 155   155 0.8650821 0.2537309 0.6880968 0.02265340 0.03940534 0.013432579
## 156   156 0.8649077 0.2540062 0.6879445 0.02263485 0.03944523 0.013401900
## 157   157 0.8649641 0.2539305 0.6880155 0.02262869 0.03969687 0.013520994
## 158   158 0.8649234 0.2539996 0.6878611 0.02274447 0.03980981 0.013575140
## 159   159 0.8650259 0.2538343 0.6879691 0.02279695 0.03966635 0.013542512
## 160   160 0.8651160 0.2536991 0.6880874 0.02285787 0.03974293 0.013594024
## 161   161 0.8650798 0.2537488 0.6879484 0.02287713 0.03961311 0.013614135
## 162   162 0.8650008 0.2538785 0.6879524 0.02288637 0.03973423 0.013515565
## 163   163 0.8650229 0.2538278 0.6878366 0.02288527 0.03954187 0.013458375
## 164   164 0.8649360 0.2539715 0.6877503 0.02286533 0.03956482 0.013475827
## 165   165 0.8649814 0.2539148 0.6878781 0.02288570 0.03954622 0.013432730
## 166   166 0.8649533 0.2539656 0.6878676 0.02291764 0.03956857 0.013405414
## 167   167 0.8649512 0.2539596 0.6877757 0.02283213 0.03951182 0.013335826
## 168   168 0.8649096 0.2540279 0.6877129 0.02275777 0.03940992 0.013285570
## 169   169 0.8649216 0.2540138 0.6876275 0.02259689 0.03940282 0.013223015
## 170   170 0.8649207 0.2540096 0.6876627 0.02263433 0.03955539 0.013221490
## 171   171 0.8649235 0.2540171 0.6875885 0.02258740 0.03965776 0.013187631
## 172   172 0.8649508 0.2539780 0.6876411 0.02254543 0.03970452 0.013197996
## 173   173 0.8649970 0.2538890 0.6876665 0.02237715 0.03954359 0.013021291
## 174   174 0.8648836 0.2540468 0.6875681 0.02232413 0.03946183 0.012925155
## 175   175 0.8648282 0.2541226 0.6875299 0.02228718 0.03933844 0.012800913
## 176   176 0.8647997 0.2541714 0.6875225 0.02241677 0.03941261 0.012910364
## 177   177 0.8647839 0.2542032 0.6874771 0.02235304 0.03954720 0.012816806
## 178   178 0.8648280 0.2541248 0.6874281 0.02234789 0.03943570 0.012811567
## 179   179 0.8647099 0.2543013 0.6873546 0.02229518 0.03938820 0.012776900
## 180   180 0.8647264 0.2542661 0.6873968 0.02228665 0.03928715 0.012732770
## 181   181 0.8646815 0.2543407 0.6873848 0.02228805 0.03933245 0.012725031
## 182   182 0.8646041 0.2544558 0.6873119 0.02230144 0.03934633 0.012697990
## 183   183 0.8646519 0.2543940 0.6873949 0.02230390 0.03936280 0.012712955
## 184   184 0.8645620 0.2545317 0.6873521 0.02228590 0.03932497 0.012671304
## 185   185 0.8645212 0.2545974 0.6873054 0.02226618 0.03936712 0.012727295
## 186   186 0.8645383 0.2545726 0.6873188 0.02237588 0.03949010 0.012825288
## 187   187 0.8645223 0.2545981 0.6873238 0.02234586 0.03942640 0.012824781
## 188   188 0.8644745 0.2546631 0.6872872 0.02235213 0.03932737 0.012835613
## 189   189 0.8645203 0.2546023 0.6872930 0.02242791 0.03935677 0.012873482
## 190   190 0.8645173 0.2546063 0.6873051 0.02239633 0.03925129 0.012846322
## 191   191 0.8644846 0.2546522 0.6872394 0.02244408 0.03920962 0.012874803
## 192   192 0.8644630 0.2546868 0.6872773 0.02241446 0.03921346 0.012851170
## 193   193 0.8644793 0.2546620 0.6872566 0.02249505 0.03924786 0.012886347
## 194   194 0.8643716 0.2548227 0.6871672 0.02260590 0.03920802 0.012994150
## 195   195 0.8643247 0.2549015 0.6871332 0.02267170 0.03927658 0.013057683
## 196   196 0.8643249 0.2549101 0.6871602 0.02264013 0.03930017 0.013028661
## 197   197 0.8643066 0.2549354 0.6871575 0.02263254 0.03935453 0.012971612
## 198   198 0.8643485 0.2548766 0.6871632 0.02260358 0.03934294 0.012984026
## 199   199 0.8644304 0.2547509 0.6872445 0.02264901 0.03930697 0.012997163
## 200   200 0.8644906 0.2546633 0.6872835 0.02267099 0.03928964 0.012979727
## 201   201 0.8644894 0.2546759 0.6872934 0.02272327 0.03938297 0.013020955
## 202   202 0.8645021 0.2546602 0.6873088 0.02271924 0.03939428 0.013039244
## 203   203 0.8644821 0.2546887 0.6872942 0.02269680 0.03938579 0.013027852
## 204   204 0.8644431 0.2547443 0.6872819 0.02269181 0.03937080 0.013009381
## 205   205 0.8643942 0.2548163 0.6872425 0.02268286 0.03937469 0.013042317
## 206   206 0.8644213 0.2547662 0.6872682 0.02270022 0.03936765 0.013057544
## 207   207 0.8644149 0.2547746 0.6872744 0.02264557 0.03931264 0.013054076
## 208   208 0.8643613 0.2548551 0.6872490 0.02271526 0.03937942 0.013071076
## 209   209 0.8643789 0.2548285 0.6872292 0.02267950 0.03937777 0.013058965
## 210   210 0.8644623 0.2546983 0.6873041 0.02266541 0.03939790 0.013031885
## 211   211 0.8644711 0.2546796 0.6872596 0.02260680 0.03933482 0.013000818
## 212   212 0.8644406 0.2547198 0.6872810 0.02253974 0.03924487 0.012967461
## 213   213 0.8644408 0.2547157 0.6872829 0.02255598 0.03921702 0.012971348
## 214   214 0.8644547 0.2546908 0.6872942 0.02254064 0.03915361 0.012995556
## 215   215 0.8644686 0.2546762 0.6873124 0.02253897 0.03916825 0.013026854
## 216   216 0.8644366 0.2547242 0.6873047 0.02250242 0.03910234 0.013004448
## 217   217 0.8643800 0.2548140 0.6872594 0.02254826 0.03915010 0.013054589
## 218   218 0.8643650 0.2548346 0.6872318 0.02253205 0.03914649 0.013068420
## 219   219 0.8643674 0.2548320 0.6872374 0.02249098 0.03911001 0.013042970
## 220   220 0.8643969 0.2547848 0.6872646 0.02247126 0.03908679 0.013036641
## 221   221 0.8644215 0.2547497 0.6872928 0.02250089 0.03909540 0.013039164
## 222   222 0.8644478 0.2547095 0.6873248 0.02252380 0.03909233 0.013045194
## 223   223 0.8644213 0.2547491 0.6873061 0.02250426 0.03907270 0.013040315
## 224   224 0.8644182 0.2547531 0.6873016 0.02250105 0.03906381 0.013041798
## 225   225 0.8644054 0.2547735 0.6872896 0.02249155 0.03904425 0.013034848
## 226   226 0.8644061 0.2547710 0.6872857 0.02252556 0.03906664 0.013050885
## 227   227 0.8644351 0.2547275 0.6873143 0.02253630 0.03907459 0.013069680
## 228   228 0.8644320 0.2547320 0.6873170 0.02253255 0.03907449 0.013064895
## 229   229 0.8644077 0.2547698 0.6872986 0.02253796 0.03908791 0.013070851
## 230   230 0.8644094 0.2547651 0.6873059 0.02254183 0.03909273 0.013078633
## 231   231 0.8644020 0.2547747 0.6872930 0.02253830 0.03909598 0.013083967
## 232   232 0.8644039 0.2547722 0.6873020 0.02253601 0.03909419 0.013077635
## 233   233 0.8644071 0.2547669 0.6873097 0.02253836 0.03910302 0.013080915
## 234   234 0.8643971 0.2547811 0.6873031 0.02253398 0.03908905 0.013073840
## 235   235 0.8643967 0.2547820 0.6873027 0.02253445 0.03909576 0.013074208
## 236   236 0.8644035 0.2547713 0.6873088 0.02253801 0.03910281 0.013074828
## 237   237 0.8644062 0.2547673 0.6873107 0.02253661 0.03909711 0.013068239
## 238   238 0.8644052 0.2547685 0.6873100 0.02253263 0.03909217 0.013064110
## 239   239 0.8644026 0.2547719 0.6873081 0.02253208 0.03909270 0.013066488
## 240   240 0.8644000 0.2547759 0.6873056 0.02253218 0.03909530 0.013066612
##    nvmax
## 11    11

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##  (Intercept)           x4           x7           x9          x10 
## -3.107249329 -0.001286918  0.338846892  0.098579372  0.037105764 
##          x16          x17          x21       stat14       stat98 
##  0.026656795  0.033446343  0.004230119 -0.023583076  0.103757551 
##      stat110     sqrt.x18 
## -0.094831072  0.800160707

Test

# Evaluate the CV-selected forward model on the hold-out test set.
if (algo.forward.caret == TRUE){
    test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): no variable `t` is assigned anywhere visible in
             # this script, so `transformation = t` would pass base::t (matrix
             # transpose) unless `t` is defined earlier out of view — verify.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.73261 -0.36478 -0.01048 -0.01629  0.36378  1.30380 
## [1] "leapForward  Test MSE: 0.747502177162688"

Forward Selection with CV (w/ filtered train)

Train

# Cross-validated forward selection via caret's leapForward, this time on
# the filtered training set (data.train2); same seed for comparable folds.
if (algo.forward.caret == TRUE){
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train2
                                   ,method =  "leapForward"
                                   ,feature.names = feature.names)
  # NOTE(review): this overwrites the full-train caret results assigned in
  # the previous section (the non-caret sections keep a separate
  # model.forward2) — confirm downstream code intends to reuse
  # `model.forward`/`id` rather than expecting `model.forward2`/`id2`.
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 16 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.8324519 0.1749919 0.6790566 0.01597113 0.03921811 0.01317663
## 2       2 0.7975540 0.2419521 0.6557232 0.01520566 0.03509192 0.01335539
## 3       3 0.7750513 0.2839167 0.6353240 0.01931842 0.02940618 0.01851574
## 4       4 0.7546417 0.3209165 0.6150965 0.01930866 0.02741817 0.01668461
## 5       5 0.7432158 0.3417524 0.6063017 0.02162484 0.02238676 0.01910499
## 6       6 0.7401442 0.3470278 0.6037990 0.02181541 0.02075352 0.02001697
## 7       7 0.7392300 0.3484991 0.6043955 0.01983376 0.01849274 0.01875508
## 8       8 0.7383514 0.3499971 0.6041595 0.01935401 0.01880114 0.01779459
## 9       9 0.7359585 0.3540996 0.6023114 0.01880218 0.01718949 0.01713525
## 10     10 0.7340769 0.3573473 0.6008371 0.01896156 0.01735750 0.01725500
## 11     11 0.7323836 0.3603312 0.5991209 0.01940450 0.01799580 0.01818901
## 12     12 0.7333335 0.3587364 0.6004735 0.01960659 0.01699529 0.01835213
## 13     13 0.7332249 0.3589732 0.6005063 0.01986340 0.01808885 0.01849686
## 14     14 0.7326772 0.3599495 0.5999679 0.02020355 0.01789635 0.01880456
## 15     15 0.7323875 0.3604614 0.5998161 0.01923768 0.01777084 0.01789792
## 16     16 0.7317308 0.3616345 0.5988380 0.02011027 0.01783930 0.01840313
## 17     17 0.7320915 0.3610218 0.5990603 0.02023952 0.01752253 0.01851289
## 18     18 0.7323446 0.3606723 0.5993921 0.02074202 0.01698115 0.01909585
## 19     19 0.7325729 0.3603103 0.5995546 0.02099447 0.01600432 0.01952815
## 20     20 0.7326691 0.3601558 0.5999234 0.02138390 0.01537972 0.02016294
## 21     21 0.7328240 0.3599509 0.6004806 0.02207385 0.01629356 0.02064490
## 22     22 0.7330182 0.3596177 0.6007959 0.02166374 0.01679292 0.02015044
## 23     23 0.7332598 0.3592186 0.6013791 0.02092943 0.01664991 0.01999957
## 24     24 0.7332448 0.3592183 0.6009740 0.02063897 0.01618363 0.01992244
## 25     25 0.7342082 0.3575969 0.6016262 0.02029283 0.01558627 0.02018328
## 26     26 0.7345005 0.3571599 0.6016931 0.02097337 0.01611577 0.02074803
## 27     27 0.7346711 0.3569035 0.6017088 0.02167007 0.01742785 0.02091274
## 28     28 0.7345268 0.3571791 0.6017894 0.02157050 0.01679695 0.02088523
## 29     29 0.7349105 0.3565451 0.6020673 0.02148822 0.01646155 0.02067036
## 30     30 0.7348447 0.3566504 0.6019782 0.02138730 0.01727672 0.02044088
## 31     31 0.7349366 0.3565394 0.6019002 0.02136110 0.01833522 0.02055157
## 32     32 0.7356447 0.3553671 0.6023556 0.02158618 0.01891122 0.02097806
## 33     33 0.7363321 0.3542503 0.6031065 0.02181796 0.01941209 0.02089048
## 34     34 0.7365924 0.3538671 0.6032072 0.02190434 0.01906906 0.02127215
## 35     35 0.7370145 0.3531759 0.6034955 0.02226525 0.01973269 0.02140380
## 36     36 0.7373133 0.3526752 0.6037595 0.02219370 0.02065592 0.02101076
## 37     37 0.7376039 0.3522015 0.6038814 0.02213294 0.02060919 0.02106023
## 38     38 0.7379776 0.3516109 0.6044713 0.02223735 0.02100682 0.02126415
## 39     39 0.7379024 0.3517842 0.6043139 0.02242406 0.02094488 0.02116351
## 40     40 0.7378765 0.3518389 0.6043357 0.02243091 0.02103150 0.02095125
## 41     41 0.7376665 0.3521774 0.6042960 0.02233423 0.02142788 0.02101720
## 42     42 0.7379881 0.3516769 0.6046059 0.02240736 0.02108816 0.02096528
## 43     43 0.7378361 0.3519534 0.6042324 0.02226680 0.02074679 0.02090386
## 44     44 0.7382872 0.3512384 0.6041947 0.02189289 0.02045952 0.02068742
## 45     45 0.7382877 0.3512769 0.6041154 0.02139786 0.02034765 0.02022171
## 46     46 0.7385854 0.3508137 0.6042597 0.02134150 0.01952527 0.02000914
## 47     47 0.7385849 0.3508801 0.6043591 0.02120283 0.01977125 0.01984872
## 48     48 0.7389071 0.3503609 0.6044822 0.02139428 0.02032603 0.02013375
## 49     49 0.7389951 0.3503045 0.6046331 0.02191091 0.02059764 0.02047063
## 50     50 0.7393215 0.3497661 0.6050588 0.02130023 0.01989800 0.01994521
## 51     51 0.7393206 0.3498023 0.6049312 0.02114386 0.02022078 0.01975642
## 52     52 0.7399174 0.3488209 0.6055595 0.02124910 0.02038553 0.01992707
## 53     53 0.7403436 0.3481117 0.6060363 0.02167736 0.02065018 0.02033069
## 54     54 0.7403811 0.3480504 0.6058468 0.02191151 0.02103714 0.02046613
## 55     55 0.7404286 0.3480323 0.6060130 0.02218750 0.02128079 0.02060629
## 56     56 0.7402238 0.3483974 0.6059455 0.02218637 0.02146898 0.02081889
## 57     57 0.7400539 0.3486711 0.6059235 0.02190785 0.02145685 0.02051266
## 58     58 0.7398769 0.3490319 0.6058627 0.02235558 0.02194019 0.02095252
## 59     59 0.7397353 0.3492628 0.6056259 0.02212074 0.02155289 0.02076264
## 60     60 0.7396736 0.3493986 0.6054122 0.02220915 0.02156932 0.02084740
## 61     61 0.7394201 0.3498369 0.6052025 0.02253277 0.02218267 0.02083848
## 62     62 0.7391725 0.3502532 0.6049783 0.02239568 0.02200648 0.02058027
## 63     63 0.7391922 0.3502756 0.6048641 0.02265072 0.02153243 0.02085263
## 64     64 0.7389413 0.3506903 0.6047627 0.02252729 0.02199284 0.02073890
## 65     65 0.7389368 0.3506956 0.6046038 0.02219214 0.02208828 0.02055256
## 66     66 0.7386319 0.3511813 0.6043738 0.02149339 0.02199982 0.02016029
## 67     67 0.7383130 0.3517112 0.6041663 0.02118363 0.02183104 0.01990274
## 68     68 0.7380514 0.3521995 0.6041422 0.02138648 0.02121764 0.01996477
## 69     69 0.7380494 0.3522645 0.6040087 0.02133296 0.02160390 0.01999324
## 70     70 0.7379551 0.3524086 0.6038579 0.02122063 0.02175258 0.02009701
## 71     71 0.7378928 0.3524884 0.6036393 0.02047379 0.02162333 0.01965901
## 72     72 0.7378624 0.3525370 0.6035342 0.02039187 0.02140687 0.01952176
## 73     73 0.7378093 0.3525992 0.6035538 0.02039767 0.02209768 0.01929996
## 74     74 0.7376282 0.3529033 0.6033573 0.02015184 0.02194175 0.01908782
## 75     75 0.7375426 0.3530648 0.6034570 0.02032810 0.02126425 0.01920855
## 76     76 0.7377824 0.3527337 0.6034535 0.02078903 0.02141399 0.01973693
## 77     77 0.7378175 0.3527182 0.6035429 0.02085089 0.02130506 0.01990648
## 78     78 0.7371154 0.3539356 0.6030241 0.02099765 0.02092174 0.02038348
## 79     79 0.7373815 0.3535069 0.6033969 0.02059145 0.02032147 0.02016546
## 80     80 0.7374784 0.3533709 0.6035277 0.02048341 0.01992522 0.02025459
## 81     81 0.7371774 0.3538675 0.6033354 0.02100291 0.01972934 0.02059331
## 82     82 0.7368976 0.3543310 0.6032319 0.02074117 0.01923164 0.02044822
## 83     83 0.7368574 0.3544208 0.6032871 0.02075510 0.01891356 0.02046862
## 84     84 0.7363434 0.3552802 0.6027886 0.02122263 0.01930144 0.02100892
## 85     85 0.7362222 0.3554761 0.6026454 0.02118433 0.01975910 0.02103862
## 86     86 0.7362577 0.3554368 0.6027265 0.02090414 0.01947319 0.02086883
## 87     87 0.7360706 0.3557495 0.6024844 0.02093881 0.01972327 0.02076017
## 88     88 0.7360699 0.3557367 0.6023860 0.02081388 0.01969525 0.02062945
## 89     89 0.7357586 0.3562447 0.6020572 0.02051169 0.01970748 0.02045193
## 90     90 0.7357832 0.3562374 0.6020626 0.02001360 0.01955427 0.02001575
## 91     91 0.7356592 0.3564809 0.6018401 0.02012821 0.02001343 0.02006998
## 92     92 0.7358895 0.3561033 0.6019516 0.02015192 0.02025071 0.01999181
## 93     93 0.7358646 0.3561385 0.6017824 0.02003264 0.02072129 0.01977868
## 94     94 0.7356166 0.3566071 0.6016653 0.02033719 0.02084515 0.02021740
## 95     95 0.7353937 0.3569574 0.6015695 0.02059874 0.02138268 0.02048761
## 96     96 0.7354868 0.3568502 0.6016436 0.02058054 0.02110329 0.02036123
## 97     97 0.7356433 0.3566011 0.6017703 0.02090539 0.02155907 0.02070889
## 98     98 0.7355183 0.3568036 0.6016402 0.02090088 0.02158139 0.02078257
## 99     99 0.7354074 0.3569920 0.6017196 0.02081912 0.02163129 0.02069336
## 100   100 0.7353140 0.3571618 0.6016336 0.02086296 0.02179981 0.02070703
## 101   101 0.7352776 0.3572285 0.6014092 0.02075862 0.02168041 0.02044485
## 102   102 0.7352650 0.3572443 0.6013765 0.02078239 0.02201892 0.02041568
## 103   103 0.7354640 0.3569062 0.6015817 0.02069117 0.02225740 0.02026260
## 104   104 0.7354482 0.3569571 0.6015567 0.02062036 0.02200800 0.02031814
## 105   105 0.7355662 0.3568108 0.6017786 0.02060204 0.02195803 0.02022892
## 106   106 0.7354659 0.3570051 0.6016554 0.02076179 0.02244106 0.02044068
## 107   107 0.7353010 0.3573178 0.6015818 0.02075902 0.02231064 0.02053614
## 108   108 0.7352984 0.3573370 0.6017411 0.02051011 0.02194164 0.02042186
## 109   109 0.7356622 0.3567658 0.6021582 0.02041596 0.02206607 0.02031842
## 110   110 0.7353081 0.3573685 0.6018944 0.02038624 0.02220566 0.02021478
## 111   111 0.7352944 0.3573787 0.6020321 0.02042582 0.02246222 0.02032799
## 112   112 0.7349242 0.3580283 0.6017072 0.02044619 0.02212015 0.02027162
## 113   113 0.7350185 0.3578851 0.6017384 0.02033850 0.02232531 0.02008932
## 114   114 0.7349472 0.3580173 0.6016314 0.02024842 0.02237857 0.02006515
## 115   115 0.7347173 0.3583929 0.6016667 0.02024741 0.02282081 0.01997510
## 116   116 0.7346328 0.3585174 0.6016682 0.02000160 0.02264708 0.01969337
## 117   117 0.7347522 0.3582998 0.6019423 0.01994215 0.02293843 0.01950985
## 118   118 0.7347545 0.3583085 0.6018902 0.02000234 0.02325583 0.01948156
## 119   119 0.7348738 0.3581315 0.6020717 0.01996431 0.02279336 0.01946380
## 120   120 0.7349805 0.3579986 0.6022664 0.02007030 0.02292287 0.01957122
## 121   121 0.7350066 0.3579819 0.6022706 0.02043212 0.02306808 0.01973628
## 122   122 0.7349626 0.3580675 0.6022036 0.02056818 0.02350024 0.01985874
## 123   123 0.7346922 0.3585011 0.6020041 0.02042951 0.02330534 0.01979275
## 124   124 0.7348491 0.3582794 0.6020624 0.02042300 0.02334373 0.01965857
## 125   125 0.7347121 0.3585228 0.6019015 0.02056992 0.02317579 0.01977781
## 126   126 0.7346797 0.3585820 0.6019018 0.02057974 0.02334711 0.01974505
## 127   127 0.7346852 0.3585881 0.6018746 0.02047302 0.02304804 0.01970247
## 128   128 0.7346112 0.3587215 0.6019498 0.02063170 0.02308819 0.01991203
## 129   129 0.7348070 0.3584207 0.6020158 0.02088588 0.02353142 0.02011189
## 130   130 0.7347810 0.3584657 0.6019700 0.02092242 0.02313926 0.02016210
## 131   131 0.7348609 0.3583514 0.6021615 0.02059272 0.02306314 0.01978911
## 132   132 0.7349062 0.3582977 0.6023246 0.02073440 0.02328970 0.01982451
## 133   133 0.7350584 0.3580742 0.6024091 0.02086043 0.02299783 0.01999944
## 134   134 0.7350120 0.3581685 0.6023273 0.02094342 0.02274809 0.02000645
## 135   135 0.7349491 0.3582852 0.6023449 0.02092144 0.02257498 0.01998966
## 136   136 0.7349746 0.3582276 0.6023405 0.02086354 0.02262026 0.01987871
## 137   137 0.7349561 0.3582447 0.6022442 0.02066727 0.02267322 0.01971599
## 138   138 0.7350721 0.3580490 0.6024058 0.02057839 0.02254218 0.01969842
## 139   139 0.7352386 0.3578113 0.6025343 0.02077906 0.02262908 0.01996001
## 140   140 0.7352993 0.3577176 0.6025495 0.02070915 0.02279222 0.01988342
## 141   141 0.7352289 0.3578178 0.6023970 0.02066793 0.02261960 0.01967742
## 142   142 0.7352066 0.3578469 0.6024744 0.02050622 0.02251380 0.01947901
## 143   143 0.7352732 0.3577406 0.6025236 0.02052201 0.02275018 0.01950879
## 144   144 0.7352550 0.3577715 0.6024521 0.02047916 0.02256530 0.01950721
## 145   145 0.7353052 0.3576993 0.6024445 0.02049665 0.02270834 0.01953909
## 146   146 0.7352259 0.3578451 0.6024492 0.02044119 0.02289901 0.01947082
## 147   147 0.7353261 0.3576950 0.6025276 0.02039442 0.02289574 0.01938445
## 148   148 0.7351867 0.3579305 0.6023213 0.02027249 0.02282724 0.01922273
## 149   149 0.7352948 0.3577514 0.6024192 0.02027836 0.02293906 0.01931777
## 150   150 0.7353986 0.3576005 0.6023673 0.02030996 0.02280965 0.01925793
## 151   151 0.7354945 0.3574567 0.6024645 0.02041364 0.02274089 0.01935962
## 152   152 0.7355798 0.3573101 0.6025882 0.02044953 0.02259879 0.01942965
## 153   153 0.7356187 0.3572479 0.6026219 0.02031608 0.02245615 0.01944229
## 154   154 0.7355654 0.3573327 0.6027001 0.02027513 0.02226130 0.01932827
## 155   155 0.7356223 0.3572536 0.6027456 0.02029502 0.02215360 0.01929469
## 156   156 0.7355270 0.3573992 0.6026085 0.02031538 0.02239131 0.01930168
## 157   157 0.7356555 0.3571859 0.6027830 0.02025472 0.02235577 0.01918353
## 158   158 0.7356604 0.3571624 0.6028001 0.02009712 0.02245058 0.01909470
## 159   159 0.7357345 0.3570617 0.6028412 0.02012363 0.02237033 0.01919861
## 160   160 0.7357502 0.3570406 0.6028558 0.02024849 0.02217818 0.01928138
## 161   161 0.7357689 0.3570035 0.6028440 0.02030917 0.02230695 0.01928129
## 162   162 0.7358427 0.3568780 0.6029562 0.02033418 0.02242280 0.01936560
## 163   163 0.7359036 0.3567691 0.6029968 0.02019872 0.02238022 0.01924739
## 164   164 0.7358423 0.3568758 0.6029833 0.02016818 0.02230711 0.01921919
## 165   165 0.7358908 0.3568134 0.6030876 0.02019595 0.02248488 0.01919782
## 166   166 0.7359917 0.3566555 0.6031968 0.02025136 0.02256746 0.01926947
## 167   167 0.7360593 0.3565508 0.6032875 0.02024253 0.02285545 0.01928371
## 168   168 0.7360708 0.3565264 0.6032528 0.02029177 0.02307711 0.01923550
## 169   169 0.7360986 0.3564911 0.6032459 0.02029108 0.02282245 0.01919061
## 170   170 0.7361066 0.3564997 0.6032139 0.02051943 0.02280915 0.01935794
## 171   171 0.7361922 0.3563761 0.6032875 0.02053099 0.02270030 0.01941508
## 172   172 0.7361870 0.3563913 0.6033291 0.02054724 0.02275529 0.01944450
## 173   173 0.7360904 0.3565661 0.6032021 0.02064072 0.02314241 0.01954299
## 174   174 0.7361330 0.3564984 0.6032247 0.02072602 0.02326668 0.01964085
## 175   175 0.7360955 0.3565531 0.6031934 0.02068851 0.02328494 0.01962552
## 176   176 0.7361005 0.3565387 0.6032086 0.02058633 0.02310954 0.01955443
## 177   177 0.7360489 0.3566334 0.6032137 0.02068280 0.02304224 0.01964055
## 178   178 0.7359967 0.3567102 0.6031952 0.02071186 0.02305145 0.01973485
## 179   179 0.7360192 0.3566840 0.6032226 0.02076362 0.02295494 0.01981959
## 180   180 0.7361559 0.3564727 0.6033416 0.02074897 0.02294859 0.01982623
## 181   181 0.7361122 0.3565599 0.6033256 0.02074892 0.02293681 0.01979216
## 182   182 0.7360568 0.3566488 0.6032498 0.02080545 0.02286649 0.01983461
## 183   183 0.7360444 0.3566669 0.6032471 0.02079132 0.02270318 0.01980596
## 184   184 0.7361034 0.3565600 0.6033651 0.02083853 0.02272547 0.01982043
## 185   185 0.7361274 0.3565112 0.6033983 0.02077842 0.02282283 0.01981273
## 186   186 0.7361821 0.3564216 0.6034148 0.02078699 0.02279464 0.01980105
## 187   187 0.7361713 0.3564377 0.6033649 0.02077184 0.02292662 0.01976069
## 188   188 0.7362432 0.3563242 0.6033731 0.02082400 0.02295372 0.01982628
## 189   189 0.7362216 0.3563726 0.6033070 0.02087841 0.02301399 0.01989373
## 190   190 0.7362321 0.3563631 0.6032299 0.02096686 0.02309367 0.01994194
## 191   191 0.7362722 0.3562957 0.6032655 0.02103327 0.02302426 0.01996181
## 192   192 0.7363071 0.3562309 0.6033130 0.02096546 0.02311303 0.01986608
## 193   193 0.7362870 0.3562733 0.6032595 0.02097364 0.02298052 0.01983520
## 194   194 0.7361930 0.3564260 0.6031829 0.02086383 0.02295529 0.01973800
## 195   195 0.7362752 0.3563110 0.6031998 0.02089017 0.02287094 0.01978842
## 196   196 0.7362508 0.3563532 0.6032242 0.02087675 0.02283725 0.01972473
## 197   197 0.7362555 0.3563449 0.6032399 0.02078956 0.02277090 0.01966612
## 198   198 0.7362122 0.3564133 0.6032047 0.02079698 0.02285234 0.01969064
## 199   199 0.7362243 0.3563833 0.6032754 0.02076058 0.02288276 0.01970422
## 200   200 0.7362664 0.3563188 0.6033132 0.02078006 0.02290797 0.01965766
## 201   201 0.7362543 0.3563388 0.6032910 0.02075840 0.02296224 0.01965497
## 202   202 0.7361850 0.3564554 0.6032540 0.02079669 0.02298993 0.01966093
## 203   203 0.7361859 0.3564578 0.6032621 0.02079045 0.02302413 0.01962379
## 204   204 0.7361707 0.3564813 0.6032381 0.02081960 0.02312906 0.01963348
## 205   205 0.7362553 0.3563383 0.6033462 0.02076036 0.02301861 0.01958192
## 206   206 0.7362343 0.3563661 0.6033538 0.02069930 0.02309719 0.01953676
## 207   207 0.7362683 0.3563106 0.6033954 0.02074759 0.02319062 0.01958893
## 208   208 0.7362996 0.3562583 0.6034578 0.02073907 0.02320238 0.01959917
## 209   209 0.7363463 0.3561876 0.6034936 0.02070905 0.02313148 0.01954959
## 210   210 0.7363528 0.3561767 0.6034855 0.02069973 0.02309986 0.01956347
## 211   211 0.7363731 0.3561480 0.6034945 0.02069744 0.02313577 0.01957390
## 212   212 0.7364060 0.3560966 0.6035242 0.02071042 0.02316844 0.01959291
## 213   213 0.7364278 0.3560595 0.6035628 0.02074890 0.02319792 0.01961407
## 214   214 0.7364332 0.3560466 0.6035766 0.02076227 0.02323864 0.01962494
## 215   215 0.7364374 0.3560361 0.6035747 0.02077239 0.02326901 0.01964388
## 216   216 0.7364224 0.3560600 0.6035538 0.02078105 0.02331385 0.01965073
## 217   217 0.7364617 0.3559932 0.6035627 0.02076519 0.02322354 0.01964862
## 218   218 0.7364361 0.3560375 0.6035308 0.02075059 0.02327440 0.01963288
## 219   219 0.7364998 0.3559364 0.6035751 0.02073213 0.02328795 0.01963967
## 220   220 0.7364683 0.3559884 0.6035379 0.02073851 0.02327453 0.01966850
## 221   221 0.7364776 0.3559707 0.6035528 0.02073487 0.02329445 0.01966415
## 222   222 0.7364621 0.3559976 0.6035377 0.02073244 0.02330522 0.01968611
## 223   223 0.7364648 0.3559927 0.6035461 0.02071560 0.02329293 0.01966586
## 224   224 0.7364598 0.3560022 0.6035254 0.02074556 0.02333190 0.01970377
## 225   225 0.7364701 0.3559830 0.6035310 0.02076814 0.02332419 0.01972144
## 226   226 0.7364904 0.3559525 0.6035233 0.02080854 0.02334562 0.01974655
## 227   227 0.7364982 0.3559344 0.6035327 0.02081637 0.02329900 0.01976604
## 228   228 0.7365073 0.3559170 0.6035446 0.02078040 0.02326653 0.01973810
## 229   229 0.7365224 0.3558955 0.6035628 0.02079335 0.02323583 0.01974396
## 230   230 0.7365405 0.3558696 0.6035757 0.02079506 0.02324264 0.01976618
## 231   231 0.7365367 0.3558742 0.6035684 0.02078369 0.02321248 0.01976573
## 232   232 0.7365415 0.3558677 0.6035674 0.02079166 0.02321487 0.01976002
## 233   233 0.7365500 0.3558528 0.6035691 0.02078198 0.02320991 0.01974132
## 234   234 0.7365563 0.3558445 0.6035734 0.02079531 0.02323921 0.01974396
## 235   235 0.7365678 0.3558236 0.6035820 0.02079426 0.02323339 0.01974909
## 236   236 0.7365770 0.3558086 0.6035879 0.02078777 0.02325910 0.01975093
## 237   237 0.7365706 0.3558182 0.6035817 0.02078349 0.02325596 0.01974991
## 238   238 0.7365684 0.3558213 0.6035815 0.02078640 0.02326370 0.01975398
## 239   239 0.7365711 0.3558170 0.6035828 0.02078492 0.02325991 0.01975210
## 240   240 0.7365714 0.3558164 0.6035841 0.02078390 0.02325952 0.01975040
##    nvmax
## 16    16

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##  (Intercept)           x4           x7           x8           x9 
## -3.492157240 -0.001454545  0.355484220  0.012272806  0.101739706 
##          x10          x13          x16          x17          x21 
##  0.043243708  0.006184991  0.024719698  0.035827337  0.004385325 
##        stat4       stat14       stat41       stat98      stat110 
## -0.020060046 -0.029429292 -0.018983255  0.108150933 -0.100901219 
##      stat144     sqrt.x18 
##  0.017815602  0.834483878

Test

# Evaluate the CV-selected forward-selection model on the held-out test set.
# Reports a summary of predictions and the test MSE (see output below).
if (algo.forward.caret) {  # flag is logical; comparing with `== TRUE` is redundant
  test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): `t` is base R's transpose function unless a
             # transformation object was assigned earlier in the document —
             # confirm `t` is deliberately set before this chunk runs.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.87585 -0.43536 -0.06651 -0.07052  0.31776  1.30819 
## [1] "leapForward  Test MSE: 0.7529414468572"

Backward Elimination

Train

# Classical backward elimination via stepwise AIC, starting from the full
# model. Assigns `model.backward` for the Test chunk below.
if (algo.backward == TRUE){
  # Takes too much time
  t1 = Sys.time()

  model.backward = step(model.full, data = data.train, direction="backward", trace = 0)
  print(summary(model.backward))

  t2 = Sys.time()
  # difftime(t2, t1) keeps the unit label (secs/mins) in the printed message
  print(paste0("Time taken for Backward Elimination: ", format(difftime(t2, t1))))

  # Residual / QQ / leverage diagnostics for the selected model
  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the held-out test set.
if (algo.backward == TRUE){
  # BUG FIX: was `model.backard` (typo) — the object assigned in the
  # training chunk above is `model.backward`.
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Backward elimination tuned by cross-validation (caret, method
# "leapBackward") on the full training set. Stores the fitted model and
# the run identifier for the Test chunk below.
if (algo.backward.caret == TRUE){
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(
    formula       = formula,
    data          = data.train,
    method        = "leapBackward",
    feature.names = feature.names
  )
  model.backward = returned$model
  id             = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 11 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD       MAESD
## 1       1 0.9290962 0.1383305 0.7452032 0.02166207 0.04198587 0.014135754
## 2       2 0.9013733 0.1870866 0.7246099 0.01861952 0.03823400 0.010184999
## 3       3 0.8828368 0.2193324 0.7063992 0.01966356 0.03628172 0.013394655
## 4       4 0.8674643 0.2460619 0.6887616 0.01704415 0.03510583 0.008406016
## 5       5 0.8579880 0.2627745 0.6814284 0.02179327 0.04052670 0.012261767
## 6       6 0.8569684 0.2645498 0.6802092 0.02203645 0.04140563 0.012736876
## 7       7 0.8563457 0.2657719 0.6808046 0.02394403 0.04272918 0.013719134
## 8       8 0.8534905 0.2706668 0.6783829 0.02287015 0.04290888 0.012749598
## 9       9 0.8528383 0.2717334 0.6776977 0.02335663 0.04296204 0.013431847
## 10     10 0.8526548 0.2720586 0.6779706 0.02376427 0.04267483 0.013589627
## 11     11 0.8508870 0.2750101 0.6760872 0.02343404 0.04253772 0.013138789
## 12     12 0.8511750 0.2745439 0.6765241 0.02288446 0.04185114 0.012393757
## 13     13 0.8517134 0.2736653 0.6773621 0.02284295 0.04132396 0.012437282
## 14     14 0.8518478 0.2734689 0.6775363 0.02308257 0.04175450 0.012522662
## 15     15 0.8523132 0.2726861 0.6785336 0.02287065 0.04124222 0.012314134
## 16     16 0.8526225 0.2721730 0.6788055 0.02222458 0.04043358 0.011840572
## 17     17 0.8528898 0.2718155 0.6789886 0.02260654 0.04194517 0.012365083
## 18     18 0.8540215 0.2699360 0.6799038 0.02283415 0.04257539 0.013054530
## 19     19 0.8541910 0.2696181 0.6801496 0.02289056 0.04211078 0.012831512
## 20     20 0.8541721 0.2696737 0.6801220 0.02305524 0.04295958 0.012883581
## 21     21 0.8540377 0.2699964 0.6800000 0.02340748 0.04278618 0.013013284
## 22     22 0.8537160 0.2705355 0.6801630 0.02327103 0.04243395 0.013064691
## 23     23 0.8542592 0.2696194 0.6805248 0.02337080 0.04281720 0.013289804
## 24     24 0.8542880 0.2695184 0.6802718 0.02289806 0.04206660 0.012672175
## 25     25 0.8544619 0.2691764 0.6803247 0.02279861 0.04208416 0.012217146
## 26     26 0.8548445 0.2685096 0.6804798 0.02274286 0.04117687 0.012264468
## 27     27 0.8548010 0.2686363 0.6803991 0.02285009 0.04157289 0.012456412
## 28     28 0.8551304 0.2680779 0.6806202 0.02205566 0.04063723 0.011831738
## 29     29 0.8547024 0.2687965 0.6800072 0.02202766 0.04040219 0.011772600
## 30     30 0.8549589 0.2684074 0.6803856 0.02202922 0.03989320 0.011639163
## 31     31 0.8546342 0.2689882 0.6799935 0.02185273 0.03967867 0.011385428
## 32     32 0.8547427 0.2688249 0.6797585 0.02218458 0.03969216 0.011389108
## 33     33 0.8546787 0.2689505 0.6796950 0.02235351 0.03937749 0.011646099
## 34     34 0.8545418 0.2691788 0.6794022 0.02227961 0.03947556 0.011556022
## 35     35 0.8549708 0.2684493 0.6801318 0.02185350 0.03882313 0.011507117
## 36     36 0.8555460 0.2675421 0.6804990 0.02196343 0.03861953 0.011306190
## 37     37 0.8552748 0.2679953 0.6804133 0.02189799 0.03852539 0.011774412
## 38     38 0.8558140 0.2671356 0.6808794 0.02170943 0.03821526 0.011325822
## 39     39 0.8560290 0.2668325 0.6809451 0.02205444 0.03812113 0.011325417
## 40     40 0.8562127 0.2664942 0.6810055 0.02198948 0.03805974 0.011233690
## 41     41 0.8565916 0.2658941 0.6811728 0.02179964 0.03775803 0.011114039
## 42     42 0.8564008 0.2661526 0.6810091 0.02103147 0.03735151 0.010588153
## 43     43 0.8565398 0.2659343 0.6810375 0.02136343 0.03770645 0.010988505
## 44     44 0.8564356 0.2661311 0.6807103 0.02102999 0.03762771 0.011057399
## 45     45 0.8566405 0.2658594 0.6808431 0.02157039 0.03795943 0.011174740
## 46     46 0.8569661 0.2653589 0.6811835 0.02140732 0.03755600 0.010967479
## 47     47 0.8572349 0.2649524 0.6813495 0.02193827 0.03803486 0.011240758
## 48     48 0.8575940 0.2643620 0.6817615 0.02199235 0.03823065 0.011328661
## 49     49 0.8577924 0.2640817 0.6819810 0.02236332 0.03870643 0.011599964
## 50     50 0.8580384 0.2637006 0.6817874 0.02226482 0.03891848 0.011695695
## 51     51 0.8588178 0.2624359 0.6822169 0.02232601 0.03855421 0.011660920
## 52     52 0.8592334 0.2617732 0.6823859 0.02250139 0.03864128 0.011688612
## 53     53 0.8594251 0.2614863 0.6825229 0.02252013 0.03915174 0.011607916
## 54     54 0.8596801 0.2611223 0.6828582 0.02282794 0.03982802 0.011766798
## 55     55 0.8597036 0.2610845 0.6828648 0.02307534 0.03961927 0.011777666
## 56     56 0.8599495 0.2606877 0.6830403 0.02327308 0.03982076 0.011985187
## 57     57 0.8599219 0.2607450 0.6831402 0.02304228 0.03950234 0.011768947
## 58     58 0.8602436 0.2602361 0.6833939 0.02302300 0.03944911 0.011919769
## 59     59 0.8604636 0.2599016 0.6835385 0.02335329 0.04018387 0.011943676
## 60     60 0.8607547 0.2594935 0.6839073 0.02337382 0.04015811 0.011730696
## 61     61 0.8606878 0.2595798 0.6839600 0.02299717 0.03998111 0.011527495
## 62     62 0.8611273 0.2589191 0.6842656 0.02273221 0.03965278 0.011458587
## 63     63 0.8613545 0.2585633 0.6845327 0.02272846 0.03960151 0.011478245
## 64     64 0.8616041 0.2582266 0.6848358 0.02301646 0.04006241 0.011763177
## 65     65 0.8616298 0.2582115 0.6850070 0.02292652 0.03979662 0.011411143
## 66     66 0.8615680 0.2583130 0.6850001 0.02323994 0.04000469 0.011397776
## 67     67 0.8616065 0.2582328 0.6849396 0.02300880 0.03995993 0.011438906
## 68     68 0.8619121 0.2577258 0.6853149 0.02297638 0.03948560 0.011569265
## 69     69 0.8620010 0.2575996 0.6855489 0.02303470 0.03948382 0.011587113
## 70     70 0.8621876 0.2573172 0.6857408 0.02296062 0.03985317 0.011704253
## 71     71 0.8624907 0.2569009 0.6859087 0.02310003 0.04000739 0.011948345
## 72     72 0.8622932 0.2572212 0.6859003 0.02340283 0.03984064 0.012097932
## 73     73 0.8624180 0.2570222 0.6859880 0.02317919 0.03992050 0.011906265
## 74     74 0.8621152 0.2575276 0.6858810 0.02289644 0.03942985 0.011624615
## 75     75 0.8624383 0.2570587 0.6861368 0.02298373 0.03902107 0.011629957
## 76     76 0.8625528 0.2568969 0.6862417 0.02296016 0.03919619 0.011642372
## 77     77 0.8625453 0.2569403 0.6861738 0.02276174 0.03910593 0.011609786
## 78     78 0.8621474 0.2575449 0.6859454 0.02282321 0.03892175 0.011481268
## 79     79 0.8622038 0.2574533 0.6860222 0.02265113 0.03882590 0.011527443
## 80     80 0.8622496 0.2573860 0.6861336 0.02254530 0.03858594 0.011463362
## 81     81 0.8623784 0.2571766 0.6862624 0.02260998 0.03865733 0.011591014
## 82     82 0.8625536 0.2569046 0.6864175 0.02260920 0.03870323 0.011557710
## 83     83 0.8626205 0.2568160 0.6864811 0.02260591 0.03888756 0.011534196
## 84     84 0.8627555 0.2566066 0.6865974 0.02247749 0.03856659 0.011471017
## 85     85 0.8627816 0.2566244 0.6865651 0.02286595 0.03909097 0.011747462
## 86     86 0.8628420 0.2565855 0.6865279 0.02280113 0.03935743 0.011528747
## 87     87 0.8627422 0.2567764 0.6863802 0.02284789 0.03966106 0.011848689
## 88     88 0.8629300 0.2565071 0.6866068 0.02287053 0.03980878 0.012095441
## 89     89 0.8629683 0.2564434 0.6868000 0.02282260 0.03979448 0.012083280
## 90     90 0.8629137 0.2565476 0.6866984 0.02253930 0.03957695 0.011942373
## 91     91 0.8629893 0.2564289 0.6866892 0.02249956 0.03973792 0.011908955
## 92     92 0.8631752 0.2561613 0.6867849 0.02242927 0.03983101 0.011909856
## 93     93 0.8633571 0.2558983 0.6869199 0.02245292 0.03976522 0.011878171
## 94     94 0.8638168 0.2551766 0.6872707 0.02251699 0.03942819 0.011648506
## 95     95 0.8638216 0.2551740 0.6871601 0.02257876 0.03944053 0.011893657
## 96     96 0.8637762 0.2552840 0.6871223 0.02250355 0.03971598 0.011707383
## 97     97 0.8639722 0.2549872 0.6872561 0.02255294 0.03967591 0.011714337
## 98     98 0.8639248 0.2550571 0.6872591 0.02269389 0.03982203 0.011733838
## 99     99 0.8637976 0.2552673 0.6872833 0.02292304 0.03966988 0.012089633
## 100   100 0.8639276 0.2550751 0.6873494 0.02283134 0.03958488 0.012119770
## 101   101 0.8639310 0.2550947 0.6873294 0.02308219 0.03973637 0.012483451
## 102   102 0.8641214 0.2547996 0.6875137 0.02294089 0.03948148 0.012453598
## 103   103 0.8640438 0.2549026 0.6874782 0.02260457 0.03958040 0.012331183
## 104   104 0.8639971 0.2549554 0.6874443 0.02239359 0.03932374 0.012277081
## 105   105 0.8638146 0.2552952 0.6873980 0.02242931 0.03945707 0.012272337
## 106   106 0.8641285 0.2548169 0.6876629 0.02249397 0.03957512 0.012301427
## 107   107 0.8640896 0.2549081 0.6874165 0.02256714 0.03951018 0.012320135
## 108   108 0.8641048 0.2548412 0.6874022 0.02242589 0.03935083 0.012446474
## 109   109 0.8641617 0.2547380 0.6873514 0.02252793 0.03964234 0.012504004
## 110   110 0.8642098 0.2546959 0.6873772 0.02278292 0.03985004 0.012625401
## 111   111 0.8640482 0.2549814 0.6872809 0.02291029 0.03993400 0.012747840
## 112   112 0.8640244 0.2550357 0.6869920 0.02301998 0.04001106 0.012780784
## 113   113 0.8639652 0.2551343 0.6869245 0.02292857 0.03992673 0.012710565
## 114   114 0.8639743 0.2550847 0.6869271 0.02293254 0.03984016 0.012523956
## 115   115 0.8642209 0.2547209 0.6870939 0.02290239 0.04002515 0.012429189
## 116   116 0.8643167 0.2545824 0.6870455 0.02274262 0.03976835 0.012325218
## 117   117 0.8645242 0.2542777 0.6871576 0.02277597 0.03963868 0.012464669
## 118   118 0.8646938 0.2540354 0.6872503 0.02268876 0.03972119 0.012425397
## 119   119 0.8649579 0.2536270 0.6874933 0.02271760 0.03986520 0.012513436
## 120   120 0.8649371 0.2536700 0.6874823 0.02287068 0.03993748 0.012553750
## 121   121 0.8648339 0.2538322 0.6874138 0.02298608 0.03991887 0.012754798
## 122   122 0.8647333 0.2540264 0.6875500 0.02310457 0.04014672 0.012897937
## 123   123 0.8646267 0.2542404 0.6874359 0.02316639 0.04022057 0.012785627
## 124   124 0.8645869 0.2543033 0.6875010 0.02322197 0.04010865 0.012838807
## 125   125 0.8646461 0.2541968 0.6876069 0.02317220 0.04026107 0.012871946
## 126   126 0.8645924 0.2542667 0.6875251 0.02307234 0.03999936 0.012739342
## 127   127 0.8646980 0.2541050 0.6876729 0.02306944 0.04010978 0.012849229
## 128   128 0.8646428 0.2542081 0.6875173 0.02294934 0.04003712 0.012713484
## 129   129 0.8647113 0.2541212 0.6875933 0.02284774 0.03977380 0.012700798
## 130   130 0.8647358 0.2540712 0.6877752 0.02281343 0.03962359 0.012609551
## 131   131 0.8648609 0.2538915 0.6878855 0.02272326 0.03941823 0.012574837
## 132   132 0.8648319 0.2539574 0.6877861 0.02273318 0.03936173 0.012691553
## 133   133 0.8649105 0.2538527 0.6878333 0.02294705 0.03947070 0.012981433
## 134   134 0.8650420 0.2536625 0.6879702 0.02292603 0.03970010 0.013139260
## 135   135 0.8650928 0.2536056 0.6879470 0.02305873 0.04002251 0.013223251
## 136   136 0.8648953 0.2539091 0.6878607 0.02287449 0.04011945 0.013138043
## 137   137 0.8650255 0.2537175 0.6879611 0.02276319 0.04013553 0.013046224
## 138   138 0.8649903 0.2537767 0.6879718 0.02277112 0.04011856 0.013034614
## 139   139 0.8649349 0.2538561 0.6879858 0.02274462 0.04025750 0.013035527
## 140   140 0.8649034 0.2539040 0.6879633 0.02269025 0.04018077 0.012953284
## 141   141 0.8649193 0.2538974 0.6878754 0.02272227 0.04020676 0.013016631
## 142   142 0.8648729 0.2539898 0.6879420 0.02274411 0.04018747 0.013118582
## 143   143 0.8649622 0.2538488 0.6879913 0.02259673 0.04014691 0.013031794
## 144   144 0.8649820 0.2538330 0.6879869 0.02262471 0.04000037 0.013051627
## 145   145 0.8648718 0.2540148 0.6878303 0.02270151 0.03980715 0.013046133
## 146   146 0.8649564 0.2538647 0.6879425 0.02261232 0.03970548 0.013038314
## 147   147 0.8649800 0.2538379 0.6879495 0.02260836 0.03978784 0.013203084
## 148   148 0.8649653 0.2538458 0.6879608 0.02271157 0.03964422 0.013328384
## 149   149 0.8650108 0.2537992 0.6880340 0.02272053 0.03977841 0.013341805
## 150   150 0.8651220 0.2536244 0.6880882 0.02262437 0.03955008 0.013285695
## 151   151 0.8651755 0.2535563 0.6880471 0.02269941 0.03958231 0.013277294
## 152   152 0.8650761 0.2537128 0.6879846 0.02271006 0.03939514 0.013359954
## 153   153 0.8650536 0.2537715 0.6880018 0.02271943 0.03957421 0.013380857
## 154   154 0.8649925 0.2538601 0.6880135 0.02259248 0.03939956 0.013376176
## 155   155 0.8649516 0.2539226 0.6880343 0.02255573 0.03940979 0.013407221
## 156   156 0.8648686 0.2540589 0.6879204 0.02264680 0.03951699 0.013477016
## 157   157 0.8650065 0.2538640 0.6880397 0.02258333 0.03963190 0.013452779
## 158   158 0.8649871 0.2539021 0.6879411 0.02263934 0.03970666 0.013386977
## 159   159 0.8651086 0.2537065 0.6880230 0.02268426 0.03953457 0.013395536
## 160   160 0.8651160 0.2536991 0.6880874 0.02285787 0.03974293 0.013594024
## 161   161 0.8650613 0.2537753 0.6879290 0.02287940 0.03961053 0.013601198
## 162   162 0.8650488 0.2537993 0.6879546 0.02278189 0.03962506 0.013450968
## 163   163 0.8650231 0.2538287 0.6878645 0.02276132 0.03953795 0.013403980
## 164   164 0.8648944 0.2540423 0.6878135 0.02278099 0.03963036 0.013345888
## 165   165 0.8649215 0.2540099 0.6879079 0.02282972 0.03963680 0.013360375
## 166   166 0.8648713 0.2540927 0.6878505 0.02286969 0.03969039 0.013394311
## 167   167 0.8649512 0.2539690 0.6878033 0.02282400 0.03953047 0.013353501
## 168   168 0.8649285 0.2540071 0.6877612 0.02276059 0.03939986 0.013316590
## 169   169 0.8648972 0.2540578 0.6876413 0.02258711 0.03943131 0.013231809
## 170   170 0.8648942 0.2540518 0.6876403 0.02262459 0.03953507 0.013224188
## 171   171 0.8649062 0.2540396 0.6875762 0.02258325 0.03959937 0.013193407
## 172   172 0.8650566 0.2538108 0.6877447 0.02247007 0.03961119 0.013142999
## 173   173 0.8650676 0.2537694 0.6877975 0.02230431 0.03939524 0.012959351
## 174   174 0.8650000 0.2538676 0.6877197 0.02230395 0.03921615 0.012826886
## 175   175 0.8649376 0.2539730 0.6876140 0.02239028 0.03945357 0.012853691
## 176   176 0.8648355 0.2541163 0.6875443 0.02240521 0.03936185 0.012897293
## 177   177 0.8648765 0.2540552 0.6875267 0.02237418 0.03943439 0.012818691
## 178   178 0.8648786 0.2540415 0.6874495 0.02236134 0.03935604 0.012809291
## 179   179 0.8648010 0.2541530 0.6874119 0.02229807 0.03925081 0.012750033
## 180   180 0.8647967 0.2541562 0.6874390 0.02224118 0.03921167 0.012710925
## 181   181 0.8647086 0.2542935 0.6873807 0.02226099 0.03931208 0.012726684
## 182   182 0.8646611 0.2543636 0.6873609 0.02224473 0.03930673 0.012678117
## 183   183 0.8647279 0.2542700 0.6874623 0.02223169 0.03931377 0.012678377
## 184   184 0.8646002 0.2544690 0.6873438 0.02226686 0.03931839 0.012720786
## 185   185 0.8644855 0.2546544 0.6872713 0.02231349 0.03940733 0.012745724
## 186   186 0.8645108 0.2546150 0.6873103 0.02241869 0.03952603 0.012811135
## 187   187 0.8645275 0.2545961 0.6873216 0.02234788 0.03942737 0.012825024
## 188   188 0.8645064 0.2546186 0.6873170 0.02236477 0.03934974 0.012832677
## 189   189 0.8645203 0.2546023 0.6872930 0.02242791 0.03935677 0.012873482
## 190   190 0.8645173 0.2546063 0.6873051 0.02239633 0.03925129 0.012846322
## 191   191 0.8644846 0.2546522 0.6872394 0.02244408 0.03920962 0.012874803
## 192   192 0.8644630 0.2546868 0.6872773 0.02241446 0.03921346 0.012851170
## 193   193 0.8644793 0.2546620 0.6872566 0.02249505 0.03924786 0.012886347
## 194   194 0.8643716 0.2548227 0.6871672 0.02260590 0.03920802 0.012994150
## 195   195 0.8643247 0.2549015 0.6871332 0.02267170 0.03927658 0.013057683
## 196   196 0.8642979 0.2549506 0.6871421 0.02266730 0.03931860 0.013036793
## 197   197 0.8642901 0.2549610 0.6871346 0.02264930 0.03936633 0.012982213
## 198   198 0.8643392 0.2548873 0.6871532 0.02260894 0.03932886 0.012982648
## 199   199 0.8643977 0.2548031 0.6872204 0.02262645 0.03934463 0.012974638
## 200   200 0.8644362 0.2547429 0.6872425 0.02260962 0.03929912 0.012936024
## 201   201 0.8644549 0.2547207 0.6872944 0.02262497 0.03934129 0.012912525
## 202   202 0.8645196 0.2546250 0.6873477 0.02267341 0.03933872 0.012939766
## 203   203 0.8645116 0.2546414 0.6873200 0.02265484 0.03933973 0.012960228
## 204   204 0.8644469 0.2547392 0.6872751 0.02269068 0.03936621 0.013013217
## 205   205 0.8643884 0.2548245 0.6872247 0.02268476 0.03938514 0.013061549
## 206   206 0.8644334 0.2547485 0.6873006 0.02269867 0.03936913 0.013078552
## 207   207 0.8644446 0.2547308 0.6872961 0.02269099 0.03937424 0.013078325
## 208   208 0.8643692 0.2548497 0.6872574 0.02273320 0.03939420 0.013067579
## 209   209 0.8643791 0.2548292 0.6872379 0.02267944 0.03937843 0.013053887
## 210   210 0.8644623 0.2546983 0.6873041 0.02266541 0.03939790 0.013031885
## 211   211 0.8644711 0.2546796 0.6872596 0.02260680 0.03933482 0.013000818
## 212   212 0.8644700 0.2546716 0.6873007 0.02256364 0.03920865 0.012984019
## 213   213 0.8644564 0.2546894 0.6873028 0.02257493 0.03919300 0.012973622
## 214   214 0.8644395 0.2547147 0.6872950 0.02254866 0.03915731 0.012994967
## 215   215 0.8644533 0.2547003 0.6873115 0.02254708 0.03917199 0.013027569
## 216   216 0.8644245 0.2547440 0.6872982 0.02250888 0.03910549 0.013009636
## 217   217 0.8643835 0.2548102 0.6872651 0.02254643 0.03914950 0.013050115
## 218   218 0.8643650 0.2548346 0.6872318 0.02253205 0.03914649 0.013068420
## 219   219 0.8643674 0.2548320 0.6872374 0.02249098 0.03911001 0.013042970
## 220   220 0.8643969 0.2547848 0.6872646 0.02247126 0.03908679 0.013036641
## 221   221 0.8644215 0.2547497 0.6872928 0.02250089 0.03909540 0.013039164
## 222   222 0.8644478 0.2547095 0.6873248 0.02252380 0.03909233 0.013045194
## 223   223 0.8644123 0.2547637 0.6872991 0.02251835 0.03908691 0.013055552
## 224   224 0.8644113 0.2547644 0.6873002 0.02251647 0.03907481 0.013060854
## 225   225 0.8644143 0.2547603 0.6872968 0.02249715 0.03902484 0.013040494
## 226   226 0.8644059 0.2547721 0.6872893 0.02252592 0.03906091 0.013054429
## 227   227 0.8644351 0.2547275 0.6873143 0.02253630 0.03907459 0.013069680
## 228   228 0.8644320 0.2547320 0.6873170 0.02253255 0.03907449 0.013064895
## 229   229 0.8644077 0.2547698 0.6872986 0.02253796 0.03908791 0.013070851
## 230   230 0.8644094 0.2547651 0.6873059 0.02254183 0.03909273 0.013078633
## 231   231 0.8644020 0.2547747 0.6872930 0.02253830 0.03909598 0.013083967
## 232   232 0.8644039 0.2547722 0.6873020 0.02253601 0.03909419 0.013077635
## 233   233 0.8644071 0.2547669 0.6873097 0.02253836 0.03910302 0.013080915
## 234   234 0.8643971 0.2547811 0.6873031 0.02253398 0.03908905 0.013073840
## 235   235 0.8643967 0.2547820 0.6873027 0.02253445 0.03909576 0.013074208
## 236   236 0.8644035 0.2547713 0.6873088 0.02253801 0.03910281 0.013074828
## 237   237 0.8644062 0.2547673 0.6873107 0.02253661 0.03909711 0.013068239
## 238   238 0.8644052 0.2547685 0.6873100 0.02253263 0.03909217 0.013064110
## 239   239 0.8644026 0.2547719 0.6873081 0.02253208 0.03909270 0.013066488
## 240   240 0.8644000 0.2547759 0.6873056 0.02253218 0.03909530 0.013066612
##    nvmax
## 11    11

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##  (Intercept)           x4           x7           x9          x10 
## -3.107249329 -0.001286918  0.338846892  0.098579372  0.037105764 
##          x16          x17          x21       stat14       stat98 
##  0.026656795  0.033446343  0.004230119 -0.023583076  0.103757551 
##      stat110     sqrt.x18 
## -0.094831072  0.800160707

Test

# Evaluate the CV-selected backward-elimination model on the held-out
# test set. Reports a summary of predictions and the test MSE.
if (algo.backward.caret) {  # flag is logical; comparing with `== TRUE` is redundant
  test.model(model.backward, data.test
             ,method = 'leapBackward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): `t` is base R's transpose function unless a
             # transformation object was assigned earlier in the document —
             # confirm `t` is deliberately set before this chunk runs.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.73261 -0.36478 -0.01048 -0.01629  0.36378  1.30380 
## [1] "leapBackward  Test MSE: 0.747502177162688"

Backward Elimination with CV (w/ filtered train)

Train

# Backward elimination tuned by cross-validation (caret, method
# "leapBackward"), this time on the filtered training set `data.train2`.
# Overwrites `model.backward` and `id` with the refit results.
if (algo.backward.caret == TRUE){
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(
    formula       = formula,
    data          = data.train2,
    method        = "leapBackward",
    feature.names = feature.names
  )
  model.backward = returned$model
  id             = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 16 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.8324519 0.1749919 0.6790566 0.01597113 0.03921811 0.01317663
## 2       2 0.7975540 0.2419521 0.6557232 0.01520566 0.03509192 0.01335539
## 3       3 0.7750513 0.2839167 0.6353240 0.01931842 0.02940618 0.01851574
## 4       4 0.7546417 0.3209165 0.6150965 0.01930866 0.02741817 0.01668461
## 5       5 0.7432158 0.3417524 0.6063017 0.02162484 0.02238676 0.01910499
## 6       6 0.7401442 0.3470278 0.6037990 0.02181541 0.02075352 0.02001697
## 7       7 0.7388292 0.3491892 0.6041669 0.01940552 0.01914209 0.01848950
## 8       8 0.7383222 0.3500468 0.6038449 0.01932533 0.01885603 0.01744368
## 9       9 0.7359585 0.3540996 0.6023114 0.01880218 0.01718949 0.01713525
## 10     10 0.7340769 0.3573473 0.6008371 0.01896156 0.01735750 0.01725500
## 11     11 0.7323836 0.3603312 0.5991209 0.01940450 0.01799580 0.01818901
## 12     12 0.7333335 0.3587364 0.6004735 0.01960659 0.01699529 0.01835213
## 13     13 0.7332249 0.3589732 0.6005063 0.01986340 0.01808885 0.01849686
## 14     14 0.7330207 0.3593675 0.6002626 0.02005035 0.01830787 0.01879488
## 15     15 0.7325484 0.3601726 0.5999397 0.01917544 0.01801880 0.01790246
## 16     16 0.7317308 0.3616345 0.5988380 0.02011027 0.01783930 0.01840313
## 17     17 0.7322342 0.3607858 0.5991575 0.02040800 0.01736870 0.01864032
## 18     18 0.7324076 0.3605535 0.5993948 0.02081518 0.01690330 0.01909928
## 19     19 0.7327134 0.3600782 0.5996675 0.02115503 0.01584226 0.01967158
## 20     20 0.7327574 0.3599987 0.6001065 0.02116280 0.01509935 0.01986224
## 21     21 0.7330630 0.3594838 0.6007861 0.02163138 0.01613652 0.01997785
## 22     22 0.7330684 0.3595181 0.6006919 0.02164248 0.01686002 0.02016024
## 23     23 0.7330937 0.3594870 0.6011990 0.02099292 0.01643847 0.02001302
## 24     24 0.7335244 0.3587289 0.6011943 0.02028905 0.01625118 0.02006790
## 25     25 0.7340471 0.3578715 0.6016609 0.02088606 0.01663591 0.02073558
## 26     26 0.7341923 0.3576891 0.6015894 0.02186515 0.01729934 0.02111070
## 27     27 0.7345717 0.3570480 0.6018002 0.02169910 0.01755480 0.02071410
## 28     28 0.7344784 0.3572508 0.6020575 0.02184505 0.01701667 0.02060297
## 29     29 0.7349207 0.3565489 0.6023176 0.02197727 0.01729729 0.02073836
## 30     30 0.7350268 0.3563852 0.6023190 0.02167079 0.01817402 0.02068993
## 31     31 0.7351375 0.3562229 0.6021529 0.02164080 0.01894575 0.02098779
## 32     32 0.7357272 0.3552410 0.6024485 0.02206709 0.01994995 0.02105072
## 33     33 0.7365305 0.3539364 0.6031123 0.02210498 0.02036133 0.02132365
## 34     34 0.7367734 0.3535593 0.6035696 0.02223409 0.01935169 0.02136655
## 35     35 0.7371631 0.3529528 0.6036269 0.02248105 0.01987008 0.02128128
## 36     36 0.7377646 0.3519296 0.6039344 0.02241741 0.02080523 0.02122380
## 37     37 0.7377886 0.3518938 0.6041700 0.02241736 0.02054585 0.02116669
## 38     38 0.7378379 0.3518580 0.6043616 0.02263703 0.02133994 0.02137927
## 39     39 0.7379033 0.3517770 0.6043544 0.02264682 0.02146249 0.02139431
## 40     40 0.7377248 0.3520466 0.6041923 0.02243992 0.02176924 0.02120984
## 41     41 0.7377956 0.3519233 0.6043662 0.02223299 0.02192225 0.02091029
## 42     42 0.7378202 0.3519570 0.6043647 0.02220859 0.02152514 0.02087119
## 43     43 0.7380122 0.3516398 0.6042314 0.02196277 0.02103358 0.02086983
## 44     44 0.7383819 0.3510828 0.6044223 0.02219104 0.02086301 0.02079454
## 45     45 0.7381989 0.3514394 0.6041294 0.02173237 0.02006428 0.02037224
## 46     46 0.7382611 0.3514142 0.6041047 0.02177508 0.02012348 0.02056553
## 47     47 0.7384513 0.3510992 0.6042864 0.02134410 0.02026912 0.01997986
## 48     48 0.7390096 0.3502030 0.6045269 0.02145013 0.02033286 0.02015334
## 49     49 0.7391026 0.3501302 0.6044283 0.02197904 0.02049242 0.02045480
## 50     50 0.7395125 0.3494579 0.6049659 0.02157960 0.01978338 0.02000360
## 51     51 0.7395869 0.3493532 0.6050745 0.02174456 0.01997547 0.02022909
## 52     52 0.7399930 0.3487315 0.6055198 0.02220767 0.02081935 0.02067016
## 53     53 0.7403258 0.3481835 0.6057741 0.02254888 0.02131872 0.02116833
## 54     54 0.7403646 0.3481632 0.6057626 0.02279238 0.02198723 0.02149358
## 55     55 0.7405578 0.3479075 0.6060675 0.02255719 0.02150829 0.02129599
## 56     56 0.7402675 0.3483648 0.6057983 0.02247447 0.02157298 0.02125275
## 57     57 0.7400713 0.3486941 0.6057415 0.02248535 0.02186180 0.02112776
## 58     58 0.7398457 0.3490980 0.6055815 0.02277168 0.02188949 0.02137303
## 59     59 0.7396393 0.3494755 0.6054555 0.02256942 0.02168052 0.02130426
## 60     60 0.7399989 0.3488604 0.6056594 0.02258464 0.02197369 0.02111364
## 61     61 0.7394520 0.3498082 0.6052275 0.02273897 0.02188312 0.02111116
## 62     62 0.7391492 0.3503066 0.6049003 0.02259167 0.02200052 0.02088273
## 63     63 0.7388801 0.3507553 0.6046626 0.02263070 0.02239225 0.02073829
## 64     64 0.7385731 0.3512850 0.6044165 0.02250421 0.02241503 0.02068141
## 65     65 0.7389032 0.3507260 0.6046745 0.02196902 0.02188999 0.02029436
## 66     66 0.7386691 0.3511145 0.6045303 0.02137111 0.02123239 0.02000717
## 67     67 0.7384457 0.3514930 0.6043414 0.02120997 0.02099713 0.01991904
## 68     68 0.7380927 0.3521267 0.6042541 0.02088386 0.02089647 0.01985005
## 69     69 0.7379604 0.3523613 0.6040993 0.02081242 0.02064170 0.01975116
## 70     70 0.7383222 0.3517431 0.6041050 0.02038979 0.02101562 0.01958006
## 71     71 0.7381147 0.3520817 0.6039872 0.02017274 0.02127331 0.01957448
## 72     72 0.7379961 0.3523173 0.6037773 0.02027256 0.02106071 0.01953018
## 73     73 0.7382875 0.3518242 0.6038303 0.02054508 0.02181250 0.01973303
## 74     74 0.7376546 0.3528747 0.6032381 0.02041644 0.02236972 0.01971345
## 75     75 0.7376373 0.3529019 0.6033699 0.02036470 0.02143375 0.01991639
## 76     76 0.7380230 0.3523338 0.6035080 0.02056881 0.02122685 0.02009105
## 77     77 0.7376924 0.3529237 0.6032176 0.02111542 0.02098117 0.02065105
## 78     78 0.7372616 0.3536971 0.6029380 0.02091920 0.02077267 0.02045711
## 79     79 0.7370484 0.3540460 0.6027298 0.02082011 0.02023735 0.02050815
## 80     80 0.7370506 0.3540474 0.6030472 0.02067651 0.02021047 0.02043272
## 81     81 0.7371030 0.3539551 0.6032664 0.02053770 0.01948618 0.02031803
## 82     82 0.7365909 0.3548262 0.6029136 0.02068908 0.01917160 0.02045268
## 83     83 0.7365162 0.3549564 0.6028117 0.02075793 0.01890449 0.02047765
## 84     84 0.7361909 0.3555405 0.6025092 0.02105289 0.01897186 0.02089617
## 85     85 0.7361063 0.3556900 0.6024716 0.02105655 0.01939723 0.02105938
## 86     86 0.7360610 0.3557874 0.6023801 0.02085794 0.01957342 0.02079779
## 87     87 0.7357875 0.3562441 0.6021318 0.02058090 0.01963295 0.02060730
## 88     88 0.7358907 0.3560597 0.6021471 0.02057698 0.01956469 0.02052825
## 89     89 0.7358817 0.3561142 0.6019263 0.02027123 0.01944037 0.02006779
## 90     90 0.7357122 0.3563669 0.6016857 0.01990913 0.01958328 0.01973794
## 91     91 0.7356018 0.3566012 0.6017060 0.02013723 0.02001663 0.01985737
## 92     92 0.7358950 0.3561089 0.6016946 0.01997946 0.02016710 0.01959512
## 93     93 0.7358142 0.3562424 0.6016055 0.02022926 0.02069673 0.01979007
## 94     94 0.7355831 0.3566789 0.6014604 0.02040330 0.02084753 0.02003126
## 95     95 0.7353561 0.3570438 0.6014616 0.02048401 0.02127502 0.02018430
## 96     96 0.7351586 0.3573851 0.6013999 0.02071498 0.02108858 0.02055617
## 97     97 0.7353867 0.3570005 0.6016002 0.02085758 0.02169963 0.02071234
## 98     98 0.7353555 0.3570594 0.6015042 0.02080017 0.02148410 0.02057996
## 99     99 0.7353622 0.3570427 0.6014576 0.02077198 0.02156498 0.02042626
## 100   100 0.7353214 0.3571289 0.6014759 0.02088673 0.02166987 0.02056860
## 101   101 0.7352321 0.3572884 0.6014275 0.02080791 0.02175750 0.02035115
## 102   102 0.7353037 0.3571574 0.6014730 0.02080936 0.02201608 0.02049454
## 103   103 0.7354449 0.3569527 0.6015833 0.02071500 0.02211927 0.02045860
## 104   104 0.7354403 0.3569919 0.6015772 0.02064450 0.02197401 0.02029978
## 105   105 0.7355631 0.3568147 0.6018835 0.02054115 0.02179020 0.02018713
## 106   106 0.7352714 0.3573585 0.6017191 0.02059763 0.02177344 0.02027878
## 107   107 0.7352532 0.3573935 0.6017625 0.02070919 0.02207279 0.02061441
## 108   108 0.7351297 0.3576209 0.6017504 0.02042177 0.02186825 0.02047120
## 109   109 0.7352548 0.3574351 0.6019641 0.02010981 0.02196528 0.02020739
## 110   110 0.7352478 0.3574638 0.6018866 0.02023271 0.02256207 0.02014932
## 111   111 0.7350903 0.3577462 0.6018518 0.02025268 0.02221669 0.02013073
## 112   112 0.7349527 0.3579684 0.6017193 0.02030928 0.02250602 0.02005199
## 113   113 0.7350037 0.3578944 0.6018728 0.02021089 0.02265954 0.02002203
## 114   114 0.7347123 0.3583867 0.6015191 0.02009273 0.02233066 0.01995680
## 115   115 0.7346044 0.3585573 0.6014961 0.02014510 0.02288685 0.01988696
## 116   116 0.7346919 0.3584145 0.6016642 0.01989106 0.02285050 0.01963548
## 117   117 0.7347585 0.3582850 0.6018171 0.01976057 0.02298913 0.01940685
## 118   118 0.7348088 0.3582398 0.6018697 0.01985867 0.02314235 0.01943144
## 119   119 0.7350112 0.3579323 0.6021152 0.01980534 0.02330670 0.01932507
## 120   120 0.7350437 0.3579291 0.6021227 0.01999448 0.02307898 0.01951638
## 121   121 0.7349793 0.3580418 0.6022564 0.02041049 0.02307206 0.01970916
## 122   122 0.7349855 0.3580319 0.6021479 0.02053932 0.02297281 0.01982730
## 123   123 0.7347974 0.3583421 0.6020922 0.02047981 0.02328953 0.01981486
## 124   124 0.7349574 0.3580903 0.6021773 0.02047346 0.02349197 0.01969752
## 125   125 0.7349567 0.3581067 0.6021455 0.02066831 0.02355930 0.01983274
## 126   126 0.7348236 0.3583227 0.6019909 0.02062007 0.02356245 0.01973303
## 127   127 0.7346960 0.3585651 0.6019076 0.02046788 0.02302864 0.01972127
## 128   128 0.7346109 0.3587193 0.6019976 0.02055486 0.02302554 0.01979572
## 129   129 0.7347967 0.3584095 0.6020601 0.02037043 0.02322615 0.01964232
## 130   130 0.7346325 0.3586828 0.6017961 0.02035625 0.02307433 0.01960182
## 131   131 0.7347794 0.3584707 0.6019710 0.02060354 0.02320372 0.01981102
## 132   132 0.7347681 0.3585086 0.6021559 0.02074723 0.02357793 0.01986750
## 133   133 0.7346875 0.3586601 0.6020694 0.02074149 0.02365907 0.01990056
## 134   134 0.7346256 0.3587775 0.6020205 0.02079427 0.02361278 0.01986977
## 135   135 0.7347838 0.3585316 0.6021457 0.02094536 0.02318253 0.02002094
## 136   136 0.7349166 0.3583194 0.6021944 0.02085331 0.02285111 0.01980319
## 137   137 0.7349252 0.3583012 0.6021883 0.02065148 0.02256738 0.01960959
## 138   138 0.7350796 0.3580551 0.6023829 0.02059870 0.02233765 0.01962065
## 139   139 0.7350692 0.3580915 0.6023115 0.02065969 0.02254301 0.01964570
## 140   140 0.7351157 0.3580279 0.6023768 0.02066424 0.02275451 0.01967534
## 141   141 0.7351034 0.3580395 0.6022575 0.02065141 0.02266576 0.01966388
## 142   142 0.7351953 0.3578844 0.6024235 0.02065282 0.02275077 0.01970607
## 143   143 0.7352703 0.3577720 0.6024299 0.02061649 0.02265158 0.01959664
## 144   144 0.7353458 0.3576544 0.6025113 0.02064246 0.02269462 0.01961263
## 145   145 0.7353549 0.3576358 0.6025362 0.02057332 0.02282067 0.01954126
## 146   146 0.7353703 0.3576146 0.6024765 0.02055004 0.02317644 0.01943650
## 147   147 0.7354947 0.3574378 0.6025417 0.02051038 0.02301323 0.01934286
## 148   148 0.7353145 0.3577338 0.6024540 0.02041604 0.02301861 0.01932420
## 149   149 0.7354543 0.3575025 0.6025727 0.02051662 0.02285256 0.01940678
## 150   150 0.7354733 0.3574801 0.6024847 0.02057706 0.02272028 0.01937727
## 151   151 0.7355794 0.3573198 0.6025681 0.02059201 0.02264957 0.01941121
## 152   152 0.7355610 0.3573472 0.6025577 0.02046167 0.02252252 0.01943847
## 153   153 0.7357141 0.3571020 0.6026985 0.02037179 0.02241762 0.01946856
## 154   154 0.7355477 0.3573625 0.6027172 0.02026872 0.02229391 0.01933610
## 155   155 0.7356026 0.3572854 0.6027595 0.02028579 0.02211639 0.01930683
## 156   156 0.7355292 0.3574103 0.6026344 0.02034679 0.02240825 0.01936357
## 157   157 0.7355263 0.3573984 0.6026370 0.02025764 0.02233428 0.01920978
## 158   158 0.7356015 0.3572880 0.6026682 0.02012881 0.02224097 0.01914269
## 159   159 0.7356216 0.3572436 0.6026866 0.02027727 0.02242081 0.01925649
## 160   160 0.7357158 0.3570858 0.6028300 0.02039062 0.02249994 0.01934826
## 161   161 0.7357233 0.3570706 0.6027258 0.02033386 0.02258041 0.01925044
## 162   162 0.7356974 0.3571213 0.6027497 0.02035718 0.02260496 0.01924915
## 163   163 0.7357313 0.3570594 0.6028585 0.02038213 0.02257666 0.01921146
## 164   164 0.7359133 0.3567916 0.6030055 0.02043802 0.02265151 0.01929276
## 165   165 0.7359482 0.3567440 0.6030043 0.02034236 0.02271383 0.01920048
## 166   166 0.7359629 0.3567407 0.6030849 0.02033163 0.02248939 0.01920147
## 167   167 0.7359641 0.3567354 0.6031158 0.02032957 0.02275229 0.01925805
## 168   168 0.7360263 0.3566181 0.6031800 0.02030008 0.02282636 0.01923657
## 169   169 0.7360930 0.3565142 0.6032482 0.02027716 0.02279856 0.01921089
## 170   170 0.7361298 0.3564695 0.6032552 0.02052436 0.02283769 0.01941221
## 171   171 0.7362161 0.3563377 0.6032819 0.02052945 0.02276698 0.01941506
## 172   172 0.7361931 0.3563858 0.6032622 0.02063859 0.02293567 0.01957784
## 173   173 0.7361328 0.3564913 0.6032152 0.02060355 0.02321551 0.01950922
## 174   174 0.7361295 0.3565029 0.6031733 0.02065227 0.02325066 0.01957246
## 175   175 0.7360523 0.3566374 0.6031424 0.02079718 0.02333959 0.01969229
## 176   176 0.7360964 0.3565624 0.6031641 0.02071284 0.02316210 0.01961474
## 177   177 0.7360493 0.3566337 0.6031489 0.02066567 0.02299483 0.01960780
## 178   178 0.7360006 0.3567078 0.6031316 0.02067507 0.02290421 0.01968881
## 179   179 0.7360383 0.3566590 0.6031581 0.02070268 0.02304395 0.01967282
## 180   180 0.7361568 0.3564825 0.6032958 0.02068867 0.02300823 0.01968802
## 181   181 0.7361117 0.3565526 0.6033453 0.02062854 0.02289063 0.01966536
## 182   182 0.7361527 0.3564886 0.6033631 0.02064370 0.02271383 0.01961691
## 183   183 0.7361459 0.3564864 0.6034077 0.02059485 0.02269580 0.01956084
## 184   184 0.7361788 0.3564221 0.6034627 0.02063191 0.02272067 0.01959631
## 185   185 0.7362023 0.3563816 0.6034604 0.02059723 0.02285383 0.01957266
## 186   186 0.7362266 0.3563522 0.6034475 0.02072022 0.02296713 0.01969417
## 187   187 0.7361941 0.3563952 0.6034159 0.02076652 0.02310928 0.01976029
## 188   188 0.7362280 0.3563451 0.6034155 0.02083061 0.02309124 0.01980826
## 189   189 0.7362165 0.3563752 0.6033653 0.02089422 0.02312278 0.01988974
## 190   190 0.7361858 0.3564325 0.6032370 0.02099661 0.02315069 0.01995204
## 191   191 0.7361584 0.3564771 0.6031886 0.02107362 0.02322946 0.01996476
## 192   192 0.7362682 0.3562968 0.6032696 0.02096340 0.02320337 0.01981862
## 193   193 0.7362474 0.3563368 0.6032556 0.02096754 0.02309411 0.01981802
## 194   194 0.7361735 0.3564622 0.6032057 0.02092079 0.02307771 0.01974293
## 195   195 0.7362725 0.3563173 0.6032449 0.02095626 0.02303538 0.01980180
## 196   196 0.7362922 0.3562844 0.6032738 0.02087124 0.02294916 0.01972360
## 197   197 0.7363072 0.3562569 0.6032770 0.02079042 0.02291836 0.01967925
## 198   198 0.7362243 0.3563950 0.6032007 0.02078768 0.02283957 0.01968536
## 199   199 0.7362500 0.3563430 0.6032768 0.02074984 0.02288965 0.01969986
## 200   200 0.7362558 0.3563395 0.6032655 0.02077343 0.02281292 0.01965231
## 201   201 0.7362432 0.3563627 0.6032717 0.02075967 0.02292272 0.01965496
## 202   202 0.7361850 0.3564554 0.6032540 0.02079669 0.02298993 0.01966093
## 203   203 0.7361704 0.3564862 0.6032601 0.02077746 0.02300348 0.01962116
## 204   204 0.7361543 0.3565107 0.6032370 0.02080644 0.02310711 0.01963081
## 205   205 0.7362520 0.3563442 0.6033432 0.02076231 0.02302343 0.01958452
## 206   206 0.7362104 0.3564047 0.6033353 0.02071554 0.02313280 0.01955651
## 207   207 0.7362558 0.3563301 0.6033884 0.02075609 0.02320866 0.01959644
## 208   208 0.7362869 0.3562780 0.6034508 0.02074772 0.02322073 0.01960661
## 209   209 0.7363463 0.3561876 0.6034936 0.02070905 0.02313148 0.01954959
## 210   210 0.7363528 0.3561767 0.6034855 0.02069973 0.02309986 0.01956347
## 211   211 0.7363731 0.3561480 0.6034945 0.02069744 0.02313577 0.01957390
## 212   212 0.7364060 0.3560966 0.6035242 0.02071042 0.02316844 0.01959291
## 213   213 0.7364278 0.3560595 0.6035628 0.02074890 0.02319792 0.01961407
## 214   214 0.7364332 0.3560466 0.6035766 0.02076227 0.02323864 0.01962494
## 215   215 0.7364374 0.3560361 0.6035747 0.02077239 0.02326901 0.01964388
## 216   216 0.7364224 0.3560600 0.6035538 0.02078105 0.02331385 0.01965073
## 217   217 0.7364617 0.3559932 0.6035627 0.02076519 0.02322354 0.01964862
## 218   218 0.7364361 0.3560375 0.6035308 0.02075059 0.02327440 0.01963288
## 219   219 0.7364998 0.3559364 0.6035751 0.02073213 0.02328795 0.01963967
## 220   220 0.7364683 0.3559884 0.6035379 0.02073851 0.02327453 0.01966850
## 221   221 0.7364776 0.3559707 0.6035528 0.02073487 0.02329445 0.01966415
## 222   222 0.7364688 0.3559858 0.6035433 0.02073548 0.02331933 0.01968809
## 223   223 0.7364764 0.3559730 0.6035457 0.02072091 0.02331656 0.01966570
## 224   224 0.7364598 0.3560022 0.6035254 0.02074556 0.02333190 0.01970377
## 225   225 0.7364701 0.3559830 0.6035310 0.02076814 0.02332419 0.01972144
## 226   226 0.7364904 0.3559525 0.6035233 0.02080854 0.02334562 0.01974655
## 227   227 0.7364982 0.3559344 0.6035327 0.02081637 0.02329900 0.01976604
## 228   228 0.7365073 0.3559170 0.6035446 0.02078040 0.02326653 0.01973810
## 229   229 0.7365224 0.3558955 0.6035628 0.02079335 0.02323583 0.01974396
## 230   230 0.7365405 0.3558696 0.6035757 0.02079506 0.02324264 0.01976618
## 231   231 0.7365367 0.3558742 0.6035684 0.02078369 0.02321248 0.01976573
## 232   232 0.7365415 0.3558677 0.6035674 0.02079166 0.02321487 0.01976002
## 233   233 0.7365500 0.3558528 0.6035691 0.02078198 0.02320991 0.01974132
## 234   234 0.7365563 0.3558445 0.6035734 0.02079531 0.02323921 0.01974396
## 235   235 0.7365678 0.3558236 0.6035820 0.02079426 0.02323339 0.01974909
## 236   236 0.7365770 0.3558086 0.6035879 0.02078777 0.02325910 0.01975093
## 237   237 0.7365706 0.3558182 0.6035817 0.02078349 0.02325596 0.01974991
## 238   238 0.7365684 0.3558213 0.6035815 0.02078640 0.02326370 0.01975398
## 239   239 0.7365711 0.3558170 0.6035828 0.02078492 0.02325991 0.01975210
## 240   240 0.7365714 0.3558164 0.6035841 0.02078390 0.02325952 0.01975040
##    nvmax
## 16    16

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##  (Intercept)           x4           x7           x8           x9 
## -3.492157240 -0.001454545  0.355484220  0.012272806  0.101739706 
##          x10          x13          x16          x17          x21 
##  0.043243708  0.006184991  0.024719698  0.035827337  0.004385325 
##        stat4       stat14       stat41       stat98      stat110 
## -0.020060046 -0.029429292 -0.018983255  0.108150933 -0.100901219 
##      stat144     sqrt.x18 
##  0.017815602  0.834483878

Test

if (algo.backward.caret == TRUE){
  # Score the CV-selected backward-elimination (leapBackward) model on the
  # hold-out set, mapping predictions back through the chosen transformation.
  # NOTE(review): `transformation = t` — confirm `t` is a transformation
  # object assigned earlier in the script and not base::t (transpose).
  test.model(model.backward, data.test
             ,method = 'leapBackward'
             ,id = id
             ,subopt = NULL
             ,formula = formula
             ,feature.names = feature.names
             ,label.names = label.names
             ,draw.limits = TRUE
             ,transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.87585 -0.43536 -0.06651 -0.07052  0.31776  1.30819 
## [1] "leapBackward  Test MSE: 0.7529414468572"

Stepwise Selection (w/ full train)

Train

if (algo.stepwise == TRUE){
  # Stepwise (both-direction) selection on the full training set:
  # start from the intercept-only model and search up to model.full.
  t1 = Sys.time()
  
  model.stepwise = step(model.null, scope=list(upper=model.full), data = data.train, direction="both", trace = 0)
  print(summary(model.stepwise))

  t2 = Sys.time()
  # FIX: `t2 - t1` yields a difftime whose units R picks automatically
  # (secs vs. mins), so pasting the bare number is ambiguous in the log.
  # format(difftime(...)) prints the value together with its units.
  print (paste("Time taken for Stepwise Selection: ", format(difftime(t2, t1)), sep = ""))
  
  # Residual/diagnostic plots for the fitted model on the training data.
  plot.diagnostics(model.stepwise, data.train)
}

Test

# Evaluate the stepwise-selected model (full training set) on the hold-out data.
# NOTE(review): this 3-argument call presumably relies on test.model's defaults
# for method/subopt/transformation — confirm against its definition.
if (algo.stepwise == TRUE){
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

if (algo.stepwise == TRUE){
  # Stepwise (both-direction) selection on the filtered training set
  # (data.train2), from the null model model.null2 up to model.full2.
  t1 = Sys.time()
  
  model.stepwise2 = step(model.null2, scope=list(upper=model.full2), data = data.train2, direction="both", trace = 0)
  print(summary(model.stepwise2))

  t2 = Sys.time()
  # FIX: `t2 - t1` yields a difftime whose units R picks automatically
  # (secs vs. mins), so pasting the bare number is ambiguous in the log.
  # format(difftime(...)) prints the value together with its units.
  print (paste("Time taken for Stepwise Selection: ", format(difftime(t2, t1)), sep = ""))
  
  # Residual/diagnostic plots for the fitted model on the filtered training data.
  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Evaluate the stepwise-selected model from the *filtered* training set on the
# (unfiltered) hold-out data. NOTE(review): the test set here is data.test, not
# a filtered counterpart — verify that is intentional.
if (algo.stepwise == TRUE){
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

if (algo.stepwise.caret == TRUE){
  # Cross-validated stepwise (sequential replacement) subset search via caret.
  # Seed fixed so the CV fold assignment is reproducible across runs.
  set.seed(1)
  # `returned` is kept as the variable name on purpose: it is left in the
  # global environment and the same name is reused by sibling chunks.
  returned = train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  # Unpack the fitted caret model and the run identifier for the test chunk.
  model.stepwise = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD       MAESD
## 1       1 0.9290962 0.1383305 0.7452032 0.02166207 0.04198587 0.014135754
## 2       2 0.9013733 0.1870866 0.7246099 0.01861952 0.03823400 0.010184999
## 3       3 0.8828368 0.2193324 0.7063992 0.01966356 0.03628172 0.013394655
## 4       4 0.8674643 0.2460619 0.6887616 0.01704415 0.03510583 0.008406016
## 5       5 0.8579880 0.2627745 0.6814284 0.02179327 0.04052670 0.012261767
## 6       6 0.8569684 0.2645498 0.6802092 0.02203645 0.04140563 0.012736876
## 7       7 0.8563457 0.2657719 0.6808046 0.02394403 0.04272918 0.013719134
## 8       8 0.8534905 0.2706668 0.6783829 0.02287015 0.04290888 0.012749598
## 9       9 0.8529722 0.2715124 0.6777699 0.02320165 0.04311993 0.013391833
## 10     10 0.8526548 0.2720586 0.6779706 0.02376427 0.04267483 0.013589627
## 11     11 0.8592462 0.2588644 0.6826772 0.02854448 0.07390302 0.022964735
## 12     12 0.8511750 0.2745439 0.6765241 0.02288446 0.04185114 0.012393757
## 13     13 0.8517134 0.2736653 0.6773621 0.02284295 0.04132396 0.012437282
## 14     14 0.8521985 0.2728552 0.6776070 0.02318661 0.04172786 0.012554160
## 15     15 0.8521204 0.2729841 0.6782299 0.02311762 0.04136872 0.012525199
## 16     16 0.8523420 0.2726211 0.6782683 0.02257694 0.04062385 0.012477076
## 17     17 0.8653912 0.2493958 0.6887789 0.04530603 0.08285612 0.035032313
## 18     18 0.8664298 0.2488630 0.6896405 0.04811823 0.06585673 0.034646537
## 19     19 0.8539601 0.2700189 0.6798540 0.02316022 0.04233733 0.013175048
## 20     20 0.8668939 0.2480180 0.6902320 0.04819056 0.06663702 0.034311623
## 21     21 0.8642425 0.2511989 0.6878512 0.03221808 0.06676730 0.023687453
## 22     22 0.8537160 0.2705355 0.6801630 0.02327103 0.04243395 0.013064691
## 23     23 0.8637294 0.2521137 0.6879790 0.03798044 0.07290745 0.030833281
## 24     24 0.8542235 0.2696554 0.6802333 0.02293015 0.04221332 0.012719398
## 25     25 0.8541811 0.2696389 0.6799857 0.02282994 0.04205152 0.012089392
## 26     26 0.8546566 0.2688146 0.6802633 0.02266422 0.04151583 0.012269597
## 27     27 0.8767418 0.2290677 0.6982105 0.04882613 0.08418249 0.039214019
## 28     28 0.8788063 0.2256143 0.6990313 0.04618459 0.08269363 0.032353326
## 29     29 0.8749458 0.2322631 0.6962447 0.04783405 0.08304087 0.036207520
## 30     30 0.8549637 0.2683766 0.6804920 0.02198460 0.03959669 0.011451076
## 31     31 0.8647284 0.2503511 0.6882188 0.03679787 0.07053245 0.030238723
## 32     32 0.8546717 0.2688960 0.6797947 0.02187467 0.03931529 0.011185968
## 33     33 0.8772568 0.2281355 0.6988350 0.04989834 0.08679486 0.038415024
## 34     34 0.8603946 0.2573264 0.6854246 0.03873315 0.07314417 0.026485229
## 35     35 0.8649349 0.2498176 0.6884678 0.03862462 0.07464023 0.029474034
## 36     36 0.8675542 0.2463041 0.6904810 0.04197542 0.06670322 0.030614534
## 37     37 0.8554302 0.2677905 0.6804810 0.02235133 0.03924079 0.011918460
## 38     38 0.8654168 0.2493145 0.6882926 0.03608757 0.06885478 0.029096759
## 39     39 0.8617151 0.2552564 0.6864514 0.03832504 0.07213766 0.025485540
## 40     40 0.8560876 0.2667109 0.6809582 0.02188569 0.03824452 0.011161918
## 41     41 0.8684854 0.2449193 0.6911208 0.04138726 0.06536557 0.030047295
## 42     42 0.8882396 0.2095647 0.7077493 0.06897498 0.09635957 0.051397919
## 43     43 0.8754865 0.2306226 0.6957377 0.04003432 0.08116157 0.033498776
## 44     44 0.8565878 0.2658579 0.6810084 0.02118513 0.03748617 0.010770760
## 45     45 0.8565837 0.2659115 0.6808639 0.02129148 0.03732904 0.011219796
## 46     46 0.9013965 0.1848578 0.7171912 0.05442618 0.10156931 0.041251554
## 47     47 0.8572349 0.2649524 0.6813495 0.02193827 0.03803486 0.011240758
## 48     48 0.8793374 0.2255716 0.7002163 0.05321307 0.08223839 0.040584468
## 49     49 0.8579625 0.2637804 0.6819299 0.02213328 0.03902071 0.011584976
## 50     50 0.8778158 0.2269386 0.6984182 0.04410538 0.09494430 0.035665236
## 51     51 0.8811443 0.2223433 0.7001555 0.05659171 0.08653088 0.046050915
## 52     52 0.8671868 0.2461065 0.6886579 0.02719622 0.07034032 0.021697795
## 53     53 0.8689406 0.2435577 0.6906708 0.03709882 0.07136106 0.028048566
## 54     54 0.8709897 0.2404769 0.6907611 0.03277571 0.06702352 0.020138183
## 55     55 0.8996079 0.1877615 0.7154512 0.04942480 0.10112955 0.040712595
## 56     56 0.8600842 0.2604802 0.6831365 0.02339426 0.04060547 0.012128698
## 57     57 0.8603140 0.2601310 0.6832761 0.02334804 0.04049024 0.012183250
## 58     58 0.8601307 0.2603921 0.6833547 0.02304277 0.03938342 0.012017043
## 59     59 0.8696021 0.2425861 0.6915929 0.03692024 0.07061387 0.027400886
## 60     60 0.8608022 0.2593928 0.6840477 0.02331408 0.04024684 0.011631424
## 61     61 0.8604522 0.2599246 0.6839588 0.02300514 0.03863991 0.011557748
## 62     62 0.8806124 0.2233794 0.7000012 0.04704204 0.08145571 0.035789180
## 63     63 0.8773176 0.2280068 0.6969521 0.04248218 0.08470144 0.027793585
## 64     64 0.8810309 0.2224512 0.7013505 0.04940894 0.09177211 0.039758568
## 65     65 0.8670225 0.2471064 0.6902743 0.03788985 0.07092014 0.025237890
## 66     66 0.8731854 0.2383067 0.6946161 0.04618196 0.06178479 0.033096822
## 67     67 0.8728068 0.2378223 0.6924465 0.03215770 0.06565624 0.019210848
## 68     68 0.8815644 0.2212636 0.6996105 0.03961818 0.08065734 0.030111535
## 69     69 0.8618591 0.2578141 0.6853754 0.02322893 0.03919625 0.011619325
## 70     70 0.8884306 0.2082516 0.7071954 0.05812558 0.10235236 0.044693376
## 71     71 0.8936736 0.2005838 0.7097145 0.05150474 0.08428413 0.039397283
## 72     72 0.8826488 0.2195138 0.7009272 0.04034558 0.08307368 0.028684761
## 73     73 0.8708003 0.2410599 0.6928054 0.03601704 0.06774861 0.028430027
## 74     74 0.8851506 0.2165233 0.7055237 0.05289536 0.08900420 0.040310853
## 75     75 0.9024112 0.1828704 0.7169639 0.04490969 0.09784267 0.035139173
## 76     76 0.8894777 0.2072132 0.7078742 0.05854658 0.09423536 0.044278302
## 77     77 0.8852117 0.2172055 0.7057082 0.05654604 0.08646999 0.042327780
## 78     78 0.8736245 0.2371511 0.6962518 0.04348959 0.07789426 0.032355679
## 79     79 0.8823787 0.2212148 0.7026682 0.05160411 0.07807982 0.039953159
## 80     80 0.8705796 0.2411228 0.6924314 0.02865264 0.07198126 0.021020516
## 81     81 0.8877214 0.2129597 0.7070209 0.06026905 0.08419500 0.045819968
## 82     82 0.8718755 0.2393441 0.6944369 0.03626828 0.06934597 0.025941374
## 83     83 0.8838781 0.2183113 0.7031931 0.04361349 0.07608064 0.033025430
## 84     84 0.8913458 0.2036215 0.7108298 0.05214140 0.09958645 0.041422365
## 85     85 0.8773652 0.2282558 0.6980706 0.04139201 0.08212348 0.029008154
## 86     86 0.8840355 0.2179736 0.7043418 0.04808080 0.08303374 0.036310050
## 87     87 0.8762039 0.2336202 0.6972638 0.05188528 0.07222604 0.038792378
## 88     88 0.8927468 0.2016463 0.7101407 0.04708470 0.08580829 0.038592611
## 89     89 0.8888330 0.2074600 0.7055687 0.04292959 0.09088982 0.029545714
## 90     90 0.8722733 0.2389782 0.6932011 0.02982020 0.06136015 0.020894842
## 91     91 0.8722676 0.2388567 0.6947443 0.03616474 0.06992646 0.026323733
## 92     92 0.8725073 0.2384822 0.6948313 0.03620574 0.07002347 0.026360716
## 93     93 0.8875150 0.2135841 0.7073091 0.06046381 0.09261348 0.046440734
## 94     94 0.8731217 0.2375333 0.6951578 0.03652717 0.07044869 0.026364251
## 95     95 0.8768879 0.2326285 0.6978042 0.05140260 0.07157151 0.038677782
## 96     96 0.8759888 0.2337066 0.6975270 0.04194979 0.06672025 0.031038667
## 97     97 0.8640393 0.2548771 0.6874742 0.02277700 0.03935692 0.011976044
## 98     98 0.8772847 0.2320407 0.6979587 0.05146057 0.07165789 0.038611919
## 99     99 0.8641730 0.2546537 0.6876961 0.02286900 0.03913186 0.012114246
## 100   100 0.8803380 0.2231860 0.7004888 0.03750128 0.08669919 0.031974912
## 101   101 0.8641338 0.2547553 0.6875914 0.02320483 0.03957584 0.012739232
## 102   102 0.8728191 0.2382632 0.6947072 0.03573937 0.06738290 0.028659307
## 103   103 0.8642184 0.2546376 0.6876263 0.02306169 0.04005427 0.012564985
## 104   104 0.9070031 0.1769976 0.7212125 0.05680768 0.10416526 0.044090268
## 105   105 0.8851537 0.2169835 0.7031597 0.04760338 0.07369721 0.036095791
## 106   106 0.9070263 0.1767456 0.7194628 0.05292611 0.09101965 0.043005476
## 107   107 0.8867265 0.2151283 0.7065249 0.05616180 0.08627617 0.042537910
## 108   108 0.8639891 0.2550239 0.6873193 0.02239155 0.03952065 0.012436923
## 109   109 0.8801083 0.2235875 0.6999916 0.03732838 0.08676179 0.032212545
## 110   110 0.8807569 0.2239190 0.7019930 0.05197925 0.09541004 0.038088294
## 111   111 0.8851187 0.2169684 0.7031294 0.04834117 0.07473916 0.036552970
## 112   112 0.8770871 0.2325202 0.6975738 0.05116784 0.07140511 0.038656385
## 113   113 0.8640121 0.2550583 0.6870976 0.02267188 0.03967150 0.012443892
## 114   114 0.8745881 0.2360984 0.6961662 0.04291672 0.07719521 0.031900651
## 115   115 0.9046269 0.1803687 0.7171142 0.04564135 0.09138454 0.035804565
## 116   116 0.8642401 0.2546997 0.6869600 0.02285731 0.04003515 0.012614590
## 117   117 0.8771594 0.2325226 0.6972262 0.05146249 0.07178309 0.038760109
## 118   118 0.8854134 0.2162876 0.7046713 0.04836690 0.08278563 0.039307496
## 119   119 0.8894733 0.2104330 0.7062549 0.05318618 0.08343556 0.039362485
## 120   120 0.8899002 0.2084282 0.7077414 0.03730294 0.06927021 0.028990438
## 121   121 0.8746127 0.2366280 0.6963254 0.04035056 0.07173887 0.029864521
## 122   122 0.8885697 0.2107163 0.7061694 0.04328841 0.06687192 0.035712029
## 123   123 0.8740852 0.2376003 0.6958867 0.03518837 0.05516854 0.026241577
## 124   124 0.8834646 0.2207512 0.7036889 0.04878528 0.07343681 0.037630315
## 125   125 0.8794633 0.2267051 0.6996689 0.03563218 0.07082593 0.027703618
## 126   126 0.8925908 0.2043680 0.7111830 0.04447155 0.07234679 0.034471704
## 127   127 0.8748020 0.2360708 0.6952982 0.02827947 0.05825259 0.017053051
## 128   128 0.8648083 0.2539653 0.6878299 0.02274295 0.03996957 0.012564448
## 129   129 0.8826518 0.2220369 0.7014501 0.04281040 0.06379247 0.033517715
## 130   130 0.8646399 0.2542235 0.6877132 0.02276994 0.03971378 0.012588351
## 131   131 0.8646324 0.2542562 0.6876825 0.02265952 0.03963237 0.012480575
## 132   132 0.8715545 0.2412794 0.6917191 0.02498441 0.05130787 0.015678647
## 133   133 0.8677883 0.2477182 0.6902254 0.03028370 0.05546897 0.018425862
## 134   134 0.8718828 0.2408014 0.6918519 0.02504678 0.05087376 0.015695925
## 135   135 0.8823202 0.2242094 0.7024046 0.04863454 0.05807892 0.036998867
## 136   136 0.8737746 0.2382214 0.6944203 0.02550716 0.05249109 0.014349532
## 137   137 0.8799797 0.2266310 0.7002312 0.02858453 0.06111989 0.019149774
## 138   138 0.8734105 0.2394631 0.6950353 0.03807328 0.05064869 0.027661458
## 139   139 0.8783060 0.2294752 0.6988318 0.03755445 0.06952706 0.027737948
## 140   140 0.8686723 0.2459676 0.6907344 0.02372950 0.05524067 0.016065905
## 141   141 0.8738229 0.2386657 0.6954078 0.03993769 0.05426891 0.029654188
## 142   142 0.8752683 0.2344584 0.6964709 0.03942645 0.07236075 0.025807268
## 143   143 0.8649516 0.2538469 0.6880035 0.02270431 0.04005201 0.013137647
## 144   144 0.8648703 0.2539760 0.6878330 0.02257195 0.03984935 0.012952278
## 145   145 0.8648879 0.2539826 0.6878674 0.02269427 0.03980446 0.013020316
## 146   146 0.8971563 0.1966214 0.7117757 0.04212337 0.07059136 0.030432577
## 147   147 0.8804787 0.2241747 0.7014397 0.03683519 0.07228040 0.028419387
## 148   148 0.8824781 0.2240488 0.7024609 0.04906939 0.05893353 0.037169895
## 149   149 0.8650119 0.2538145 0.6879636 0.02272954 0.03980632 0.013364885
## 150   150 0.8678560 0.2476674 0.6903307 0.02997338 0.05539486 0.018564403
## 151   151 0.8817579 0.2244302 0.7009435 0.03169543 0.05593915 0.020879383
## 152   152 0.8803579 0.2264860 0.7010962 0.03557039 0.06039931 0.026105165
## 153   153 0.8735485 0.2387192 0.6943027 0.02477171 0.05122594 0.014086845
## 154   154 0.8649591 0.2539126 0.6880521 0.02267430 0.03951105 0.013467896
## 155   155 0.8715558 0.2414222 0.6916535 0.02534874 0.05140065 0.016252723
## 156   156 0.8929528 0.2038308 0.7099383 0.04733755 0.07722409 0.032561109
## 157   157 0.8790360 0.2289557 0.6995256 0.04027924 0.05820745 0.031580655
## 158   158 0.8649624 0.2539341 0.6879187 0.02268128 0.03974367 0.013448366
## 159   159 0.8651086 0.2537065 0.6880230 0.02268426 0.03953457 0.013395536
## 160   160 0.8909971 0.2087177 0.7087473 0.04250954 0.05994092 0.031013233
## 161   161 0.8650798 0.2537488 0.6879484 0.02287713 0.03961311 0.013614135
## 162   162 0.8736932 0.2385111 0.6942976 0.02525143 0.05196760 0.014542208
## 163   163 0.8855711 0.2163066 0.7030001 0.03454141 0.06206196 0.027363824
## 164   164 0.8726273 0.2403997 0.6937603 0.03547333 0.06311365 0.022867300
## 165   165 0.8736284 0.2392300 0.6950981 0.03882071 0.05116782 0.028632653
## 166   166 0.8648703 0.2540941 0.6878219 0.02286913 0.03969184 0.013376296
## 167   167 0.8648695 0.2540862 0.6877568 0.02278479 0.03963432 0.013323996
## 168   168 0.8809082 0.2254124 0.7009813 0.02937190 0.06206168 0.021008957
## 169   169 0.8723501 0.2405821 0.6944909 0.02985847 0.05675374 0.022179638
## 170   170 0.8818617 0.2229149 0.7016526 0.04377386 0.06734066 0.033431363
## 171   171 0.8733695 0.2396602 0.6946862 0.03788549 0.05004169 0.028037450
## 172   172 0.8676704 0.2480953 0.6899803 0.02931807 0.05448867 0.018336753
## 173   173 0.8851133 0.2166751 0.7029597 0.03248678 0.06836129 0.026951291
## 174   174 0.8650240 0.2538197 0.6877425 0.02226390 0.03927430 0.012854128
## 175   175 0.8890549 0.2118360 0.7080556 0.04709299 0.07365969 0.034832260
## 176   176 0.8805256 0.2246046 0.7004189 0.03748008 0.07199557 0.027582978
## 177   177 0.8647839 0.2542032 0.6874771 0.02235304 0.03954720 0.012816806
## 178   178 0.8718102 0.2411029 0.6914010 0.02587972 0.05273196 0.016710424
## 179   179 0.8731629 0.2393300 0.6937857 0.02479172 0.05149687 0.014703320
## 180   180 0.8811385 0.2255356 0.7015483 0.04301651 0.06414298 0.032972103
## 181   181 0.8842254 0.2193868 0.7033373 0.04528023 0.07647359 0.031189227
## 182   182 0.8716690 0.2413000 0.6913236 0.02606394 0.05312675 0.016698780
## 183   183 0.8672093 0.2487632 0.6894750 0.02891905 0.05390437 0.017452539
## 184   184 0.8689999 0.2455908 0.6906412 0.02409409 0.05715011 0.016854923
## 185   185 0.8715594 0.2414954 0.6912570 0.02616141 0.05329524 0.016724595
## 186   186 0.8719753 0.2411510 0.6940006 0.02941072 0.05627120 0.021643044
## 187   187 0.8731164 0.2394060 0.6938644 0.02505685 0.05200775 0.014892528
## 188   188 0.8645064 0.2546186 0.6873170 0.02236477 0.03934974 0.012832677
## 189   189 0.8886391 0.2120439 0.7064379 0.04178800 0.06562940 0.032115996
## 190   190 0.8733950 0.2395563 0.6949561 0.03906619 0.05154115 0.029433254
## 191   191 0.8733670 0.2395952 0.6949050 0.03910316 0.05151870 0.029465142
## 192   192 0.8691130 0.2454601 0.6907534 0.02446839 0.05780810 0.017321558
## 193   193 0.8644793 0.2546620 0.6872566 0.02249505 0.03924786 0.012886347
## 194   194 0.8643716 0.2548227 0.6871672 0.02260590 0.03920802 0.012994150
## 195   195 0.8643247 0.2549015 0.6871332 0.02267170 0.03927658 0.013057683
## 196   196 0.8643249 0.2549101 0.6871602 0.02264013 0.03930017 0.013028661
## 197   197 0.8642901 0.2549610 0.6871346 0.02264930 0.03936633 0.012982213
## 198   198 0.8732370 0.2392421 0.6940626 0.02573510 0.05260128 0.015841510
## 199   199 0.8738084 0.2387886 0.6949240 0.04165417 0.05611294 0.030488302
## 200   200 0.8716587 0.2413819 0.6914229 0.02632597 0.05303961 0.016934591
## 201   201 0.8644377 0.2547544 0.6872507 0.02266078 0.03939412 0.012980464
## 202   202 0.8741259 0.2358486 0.6934874 0.03130923 0.06314310 0.020291484
## 203   203 0.8785278 0.2294904 0.6986103 0.04156550 0.06779175 0.031666697
## 204   204 0.8644431 0.2547443 0.6872819 0.02269181 0.03937080 0.013009381
## 205   205 0.8823931 0.2237955 0.7017230 0.03950371 0.05865018 0.028991331
## 206   206 0.8734837 0.2392705 0.6947757 0.03483170 0.05335544 0.024946217
## 207   207 0.8644249 0.2547562 0.6872881 0.02264428 0.03931416 0.013062958
## 208   208 0.8793411 0.2284044 0.6992139 0.04065246 0.07322975 0.029898400
## 209   209 0.8670289 0.2490398 0.6895016 0.02949785 0.05441231 0.018298519
## 210   210 0.8733651 0.2394519 0.6946821 0.03435576 0.05269755 0.024477263
## 211   211 0.8644711 0.2546796 0.6872596 0.02260680 0.03933482 0.013000818
## 212   212 0.8708648 0.2429031 0.6922468 0.02968653 0.05484614 0.022292458
## 213   213 0.8644610 0.2546821 0.6872939 0.02257251 0.03919190 0.012980672
## 214   214 0.8740499 0.2384127 0.6951331 0.04219420 0.05675851 0.030928163
## 215   215 0.8644686 0.2546762 0.6873124 0.02253897 0.03916825 0.013026854
## 216   216 0.8733635 0.2391005 0.6942561 0.02591895 0.05278118 0.016029176
## 217   217 0.8819504 0.2244822 0.7011244 0.03744117 0.07088016 0.024705589
## 218   218 0.8723499 0.2406825 0.6945431 0.03087677 0.05795997 0.023571694
## 219   219 0.8736745 0.2391764 0.6950409 0.04018613 0.05264524 0.030095604
## 220   220 0.8643969 0.2547848 0.6872646 0.02247126 0.03908679 0.013036641
## 221   221 0.8740701 0.2386177 0.6952739 0.04110725 0.05373176 0.030621403
## 222   222 0.8711660 0.2426002 0.6923722 0.03030997 0.05542114 0.022546614
## 223   223 0.8644213 0.2547491 0.6873061 0.02250426 0.03907270 0.013040315
## 224   224 0.8644092 0.2547678 0.6872945 0.02251523 0.03907811 0.013057313
## 225   225 0.8731131 0.2394807 0.6940223 0.02543556 0.05197587 0.015377385
## 226   226 0.8644059 0.2547721 0.6872893 0.02252592 0.03906091 0.013054429
## 227   227 0.8900605 0.2103957 0.7065287 0.04782282 0.07383154 0.035485234
## 228   228 0.8820149 0.2242614 0.7017007 0.03402472 0.05854791 0.024620942
## 229   229 0.8717625 0.2412633 0.6916625 0.02649880 0.05329161 0.017515169
## 230   230 0.8820866 0.2242965 0.7023370 0.04544115 0.06741282 0.034691037
## 231   231 0.8742304 0.2384237 0.6954509 0.04159119 0.05420019 0.031139592
## 232   232 0.8815484 0.2250557 0.7007517 0.03672137 0.06972011 0.023924947
## 233   233 0.8734000 0.2394418 0.6950262 0.03473377 0.05307766 0.025707778
## 234   234 0.8760350 0.2337359 0.6974612 0.03879997 0.06324504 0.028274825
## 235   235 0.8898549 0.2089199 0.7091862 0.04649916 0.07989804 0.036512553
## 236   236 0.8969270 0.1974004 0.7123172 0.03575605 0.07587214 0.026061925
## 237   237 0.8798184 0.2274932 0.6990523 0.03058788 0.06173640 0.022624304
## 238   238 0.8957749 0.2008137 0.7138462 0.05773310 0.07092531 0.043826623
## 239   239 0.9186306 0.1599065 0.7301135 0.04597878 0.05408139 0.036537540
## 240   240 0.8644000 0.2547759 0.6873056 0.02253218 0.03909530 0.013066612
##    nvmax
## 12    12

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##  (Intercept)           x4           x7           x9          x10 
## -3.108145360 -0.001291464  0.338750221  0.099136004  0.037136438 
##          x16          x17          x21       stat14       stat98 
##  0.026572248  0.033125681  0.004323330 -0.023488174  0.103541988 
##      stat110      stat144     sqrt.x18 
## -0.095167667  0.018553690  0.799296053

Test

if (algo.stepwise.caret == TRUE){
  # Evaluate the CV-trained stepwise (leapSeq) model on the held-out test set.
  # test.model is a project helper; presumably it prints a prediction summary
  # and the test MSE, and draws diagnostic plots -- confirm against its definition.
  test.model(model.stepwise,
             data.test,
             method = "leapSeq",
             subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.70939 -0.36589 -0.01430 -0.01661  0.36316  1.27519 
## [1] "leapSeq  Test MSE: 0.748371268905447"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

if(algo.LASSO == TRUE){
  # Assemble numeric matrices for glmnet.
  # model.matrix() would work here too -- it builds a design (model) matrix,
  # expanding factors into dummy columns (per the contrasts) and expanding
  # interactions similarly.
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Decreasing grid of 100 penalty values, from 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty in cv.glmnet.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter; it can also be chosen visually from the CV plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Predict on the held-out test set at the CV-selected penalty.
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared prediction error on the test set.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the quantity above is the MSE, not the RMSE -- label it
  # consistently with the other test chunks ("Test MSE") in this report.
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  # Observed vs. predicted scatter for a quick visual fit check.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

if(algo.LASSO == TRUE){
  # Same LASSO fit as above, but trained on the filtered training set
  # (data.train2). model.matrix() would also work for building the design
  # matrix -- it expands factors to dummy variables (per the contrasts)
  # and expands interactions similarly.
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Decreasing grid of 100 penalty values, from 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty in cv.glmnet.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter; it can also be chosen visually from the CV plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Predict on the held-out test set at the CV-selected penalty
  # (model trained on the filtered training set).
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared prediction error on the test set.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the quantity above is the MSE, not the RMSE -- label it
  # consistently with the other test chunks ("Test MSE") in this report.
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  # Observed vs. predicted scatter for a quick visual fit check.
  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

if (algo.LASSO.caret == TRUE){
  set.seed(1)
  # Cross-validated LASSO via caret (glmnet, subopt = 'LASSO') on the full
  # training set. train.caret.glmselect is a project helper; presumably it
  # returns the fitted caret object in $model -- confirm against its definition.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "glmnet",
                                    subopt = "LASSO",
                                    feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.0152 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE       Rsquared    MAE      
##   0.01000000  0.8532144  0.27102895  0.6795185
##   0.01047616  0.8530213  0.27141794  0.6794233
##   0.01097499  0.8528491  0.27177890  0.6793363
##   0.01149757  0.8526914  0.27212460  0.6792552
##   0.01204504  0.8525430  0.27246450  0.6791915
##   0.01261857  0.8524119  0.27278398  0.6791374
##   0.01321941  0.8522871  0.27310292  0.6790943
##   0.01384886  0.8521825  0.27339851  0.6790843
##   0.01450829  0.8521151  0.27364136  0.6791052
##   0.01519911  0.8520930  0.27381650  0.6791693
##   0.01592283  0.8521066  0.27394280  0.6792861
##   0.01668101  0.8521681  0.27399627  0.6794422
##   0.01747528  0.8522906  0.27395359  0.6796455
##   0.01830738  0.8524733  0.27382033  0.6799146
##   0.01917910  0.8527052  0.27361533  0.6802141
##   0.02009233  0.8529811  0.27334342  0.6805482
##   0.02104904  0.8532616  0.27307913  0.6808795
##   0.02205131  0.8535478  0.27281833  0.6812024
##   0.02310130  0.8538764  0.27248963  0.6815581
##   0.02420128  0.8542394  0.27211208  0.6819348
##   0.02535364  0.8546300  0.27170651  0.6823197
##   0.02656088  0.8550490  0.27127020  0.6827205
##   0.02782559  0.8554551  0.27088709  0.6831322
##   0.02915053  0.8559025  0.27045480  0.6835873
##   0.03053856  0.8563987  0.26995766  0.6840943
##   0.03199267  0.8569489  0.26938837  0.6846654
##   0.03351603  0.8575553  0.26874300  0.6852871
##   0.03511192  0.8582314  0.26799781  0.6859719
##   0.03678380  0.8589671  0.26716995  0.6867211
##   0.03853529  0.8597735  0.26623979  0.6875325
##   0.04037017  0.8605962  0.26531899  0.6883635
##   0.04229243  0.8614498  0.26437838  0.6892116
##   0.04430621  0.8623159  0.26346298  0.6900620
##   0.04641589  0.8632083  0.26254731  0.6909337
##   0.04862602  0.8641029  0.26169822  0.6917996
##   0.05094138  0.8650372  0.26083944  0.6927078
##   0.05336699  0.8659855  0.26003559  0.6936193
##   0.05590810  0.8669816  0.25921837  0.6945813
##   0.05857021  0.8679555  0.25856878  0.6955204
##   0.06135907  0.8689831  0.25791381  0.6965045
##   0.06428073  0.8700734  0.25724500  0.6975246
##   0.06734151  0.8712412  0.25653890  0.6986200
##   0.07054802  0.8725175  0.25573622  0.6997804
##   0.07390722  0.8739158  0.25481438  0.7010341
##   0.07742637  0.8754476  0.25375127  0.7023926
##   0.08111308  0.8771254  0.25252150  0.7038583
##   0.08497534  0.8789628  0.25109447  0.7054485
##   0.08902151  0.8809746  0.24943304  0.7071559
##   0.09326033  0.8831770  0.24749206  0.7089988
##   0.09770100  0.8855875  0.24521641  0.7110085
##   0.10235310  0.8882252  0.24253866  0.7131713
##   0.10722672  0.8911107  0.23937613  0.7155122
##   0.11233240  0.8941995  0.23583019  0.7180042
##   0.11768120  0.8975200  0.23177706  0.7206807
##   0.12328467  0.9008185  0.22798761  0.7233588
##   0.12915497  0.9041860  0.22418211  0.7260862
##   0.13530478  0.9077278  0.22008278  0.7289543
##   0.14174742  0.9115606  0.21522154  0.7320292
##   0.14849683  0.9157436  0.20928071  0.7353421
##   0.15556761  0.9203111  0.20197592  0.7389171
##   0.16297508  0.9250332  0.19399641  0.7425134
##   0.17073526  0.9299220  0.18502659  0.7461637
##   0.17886495  0.9344052  0.17741017  0.7494081
##   0.18738174  0.9386580  0.17034968  0.7524579
##   0.19630407  0.9425379  0.16487289  0.7552377
##   0.20565123  0.9464765  0.15917184  0.7580067
##   0.21544347  0.9507209  0.15192181  0.7610165
##   0.22570197  0.9550683  0.14334390  0.7640930
##   0.23644894  0.9584683  0.13963099  0.7665545
##   0.24770764  0.9614976  0.13858911  0.7687593
##   0.25950242  0.9646354  0.13833046  0.7710595
##   0.27185882  0.9680237  0.13833046  0.7735196
##   0.28480359  0.9717284  0.13833046  0.7761915
##   0.29836472  0.9757779  0.13833046  0.7791014
##   0.31257158  0.9802028  0.13833046  0.7822972
##   0.32745492  0.9850360  0.13833046  0.7858040
##   0.34304693  0.9903132  0.13833046  0.7896581
##   0.35938137  0.9960332  0.13311345  0.7938412
##   0.37649358  0.9982737  0.06590287  0.7954959
##   0.39442061  0.9984602         NaN  0.7956389
##   0.41320124  0.9984602         NaN  0.7956389
##   0.43287613  0.9984602         NaN  0.7956389
##   0.45348785  0.9984602         NaN  0.7956389
##   0.47508102  0.9984602         NaN  0.7956389
##   0.49770236  0.9984602         NaN  0.7956389
##   0.52140083  0.9984602         NaN  0.7956389
##   0.54622772  0.9984602         NaN  0.7956389
##   0.57223677  0.9984602         NaN  0.7956389
##   0.59948425  0.9984602         NaN  0.7956389
##   0.62802914  0.9984602         NaN  0.7956389
##   0.65793322  0.9984602         NaN  0.7956389
##   0.68926121  0.9984602         NaN  0.7956389
##   0.72208090  0.9984602         NaN  0.7956389
##   0.75646333  0.9984602         NaN  0.7956389
##   0.79248290  0.9984602         NaN  0.7956389
##   0.83021757  0.9984602         NaN  0.7956389
##   0.86974900  0.9984602         NaN  0.7956389
##   0.91116276  0.9984602         NaN  0.7956389
##   0.95454846  0.9984602         NaN  0.7956389
##   1.00000000  0.9984602         NaN  0.7956389
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01519911.

##    alpha     lambda
## 10     1 0.01519911
##     alpha     lambda      RMSE   Rsquared       MAE     RMSESD RsquaredSD
## 1       1 0.01000000 0.8532144 0.27102895 0.6795185 0.02213031 0.04153400
## 2       1 0.01047616 0.8530213 0.27141794 0.6794233 0.02212012 0.04162592
## 3       1 0.01097499 0.8528491 0.27177890 0.6793363 0.02211060 0.04169776
## 4       1 0.01149757 0.8526914 0.27212460 0.6792552 0.02209393 0.04175171
## 5       1 0.01204504 0.8525430 0.27246450 0.6791915 0.02207638 0.04180905
## 6       1 0.01261857 0.8524119 0.27278398 0.6791374 0.02204640 0.04185425
## 7       1 0.01321941 0.8522871 0.27310292 0.6790943 0.02201242 0.04191032
## 8       1 0.01384886 0.8521825 0.27339851 0.6790843 0.02197342 0.04196157
## 9       1 0.01450829 0.8521151 0.27364136 0.6791052 0.02192342 0.04198908
## 10      1 0.01519911 0.8520930 0.27381650 0.6791693 0.02186852 0.04203131
## 11      1 0.01592283 0.8521066 0.27394280 0.6792861 0.02180735 0.04210260
## 12      1 0.01668101 0.8521681 0.27399627 0.6794422 0.02173796 0.04218432
## 13      1 0.01747528 0.8522906 0.27395359 0.6796455 0.02168738 0.04223472
## 14      1 0.01830738 0.8524733 0.27382033 0.6799146 0.02167317 0.04234139
## 15      1 0.01917910 0.8527052 0.27361533 0.6802141 0.02167159 0.04248947
## 16      1 0.02009233 0.8529811 0.27334342 0.6805482 0.02165659 0.04261724
## 17      1 0.02104904 0.8532616 0.27307913 0.6808795 0.02164918 0.04274640
## 18      1 0.02205131 0.8535478 0.27281833 0.6812024 0.02162812 0.04282218
## 19      1 0.02310130 0.8538764 0.27248963 0.6815581 0.02158873 0.04284560
## 20      1 0.02420128 0.8542394 0.27211208 0.6819348 0.02152559 0.04280866
## 21      1 0.02535364 0.8546300 0.27170651 0.6823197 0.02144619 0.04275285
## 22      1 0.02656088 0.8550490 0.27127020 0.6827205 0.02137395 0.04266281
## 23      1 0.02782559 0.8554551 0.27088709 0.6831322 0.02129456 0.04264380
## 24      1 0.02915053 0.8559025 0.27045480 0.6835873 0.02123000 0.04264688
## 25      1 0.03053856 0.8563987 0.26995766 0.6840943 0.02116758 0.04264417
## 26      1 0.03199267 0.8569489 0.26938837 0.6846654 0.02109850 0.04264467
## 27      1 0.03351603 0.8575553 0.26874300 0.6852871 0.02101943 0.04266186
## 28      1 0.03511192 0.8582314 0.26799781 0.6859719 0.02092887 0.04267332
## 29      1 0.03678380 0.8589671 0.26716995 0.6867211 0.02082899 0.04267315
## 30      1 0.03853529 0.8597735 0.26623979 0.6875325 0.02072021 0.04266538
## 31      1 0.04037017 0.8605962 0.26531899 0.6883635 0.02061790 0.04266846
## 32      1 0.04229243 0.8614498 0.26437838 0.6892116 0.02050321 0.04260132
## 33      1 0.04430621 0.8623159 0.26346298 0.6900620 0.02039365 0.04255101
## 34      1 0.04641589 0.8632083 0.26254731 0.6909337 0.02030061 0.04241414
## 35      1 0.04862602 0.8641029 0.26169822 0.6917996 0.02019829 0.04231559
## 36      1 0.05094138 0.8650372 0.26083944 0.6927078 0.02009092 0.04215105
## 37      1 0.05336699 0.8659855 0.26003559 0.6936193 0.01996369 0.04203734
## 38      1 0.05590810 0.8669816 0.25921837 0.6945813 0.01987639 0.04188776
## 39      1 0.05857021 0.8679555 0.25856878 0.6955204 0.01978682 0.04192617
## 40      1 0.06135907 0.8689831 0.25791381 0.6965045 0.01973059 0.04199530
## 41      1 0.06428073 0.8700734 0.25724500 0.6975246 0.01967122 0.04212963
## 42      1 0.06734151 0.8712412 0.25653890 0.6986200 0.01962005 0.04223014
## 43      1 0.07054802 0.8725175 0.25573622 0.6997804 0.01956616 0.04231738
## 44      1 0.07390722 0.8739158 0.25481438 0.7010341 0.01951526 0.04240800
## 45      1 0.07742637 0.8754476 0.25375127 0.7023926 0.01946810 0.04250155
## 46      1 0.08111308 0.8771254 0.25252150 0.7038583 0.01942553 0.04259741
## 47      1 0.08497534 0.8789628 0.25109447 0.7054485 0.01938850 0.04269461
## 48      1 0.08902151 0.8809746 0.24943304 0.7071559 0.01935810 0.04279173
## 49      1 0.09326033 0.8831770 0.24749206 0.7089988 0.01933551 0.04288672
## 50      1 0.09770100 0.8855875 0.24521641 0.7110085 0.01932206 0.04297661
## 51      1 0.10235310 0.8882252 0.24253866 0.7131713 0.01931921 0.04305714
## 52      1 0.10722672 0.8911107 0.23937613 0.7155122 0.01932856 0.04312228
## 53      1 0.11233240 0.8941995 0.23583019 0.7180042 0.01933895 0.04326957
## 54      1 0.11768120 0.8975200 0.23177706 0.7206807 0.01940363 0.04330905
## 55      1 0.12328467 0.9008185 0.22798761 0.7233588 0.01950757 0.04376605
## 56      1 0.12915497 0.9041860 0.22418211 0.7260862 0.01977634 0.04402209
## 57      1 0.13530478 0.9077278 0.22008278 0.7289543 0.01998752 0.04451912
## 58      1 0.14174742 0.9115606 0.21522154 0.7320292 0.02021787 0.04494907
## 59      1 0.14849683 0.9157436 0.20928071 0.7353421 0.02045963 0.04526328
## 60      1 0.15556761 0.9203111 0.20197592 0.7389171 0.02071824 0.04543394
## 61      1 0.16297508 0.9250332 0.19399641 0.7425134 0.02084589 0.04595435
## 62      1 0.17073526 0.9299220 0.18502659 0.7461637 0.02105934 0.04541731
## 63      1 0.17886495 0.9344052 0.17741017 0.7494081 0.02112094 0.04583971
## 64      1 0.18738174 0.9386580 0.17034968 0.7524579 0.02146984 0.04486338
## 65      1 0.19630407 0.9425379 0.16487289 0.7552377 0.02157489 0.04507655
## 66      1 0.20565123 0.9464765 0.15917184 0.7580067 0.02186961 0.04500289
## 67      1 0.21544347 0.9507209 0.15192181 0.7610165 0.02220675 0.04445183
## 68      1 0.22570197 0.9550683 0.14334390 0.7640930 0.02269728 0.04189553
## 69      1 0.23644894 0.9584683 0.13963099 0.7665545 0.02269389 0.04230942
## 70      1 0.24770764 0.9614976 0.13858911 0.7687593 0.02310014 0.04208381
## 71      1 0.25950242 0.9646354 0.13833046 0.7710595 0.02352583 0.04198587
## 72      1 0.27185882 0.9680237 0.13833046 0.7735196 0.02400708 0.04198587
## 73      1 0.28480359 0.9717284 0.13833046 0.7761915 0.02453453 0.04198587
## 74      1 0.29836472 0.9757779 0.13833046 0.7791014 0.02511009 0.04198587
## 75      1 0.31257158 0.9802028 0.13833046 0.7822972 0.02573545 0.04198587
## 76      1 0.32745492 0.9850360 0.13833046 0.7858040 0.02641212 0.04198587
## 77      1 0.34304693 0.9903132 0.13833046 0.7896581 0.02714135 0.04198587
## 78      1 0.35938137 0.9960332 0.13311345 0.7938412 0.02785663 0.04095084
## 79      1 0.37649358 0.9982737 0.06590287 0.7954959 0.02648742 0.02007436
## 80      1 0.39442061 0.9984602        NaN 0.7956389 0.02634986         NA
## 81      1 0.41320124 0.9984602        NaN 0.7956389 0.02634986         NA
## 82      1 0.43287613 0.9984602        NaN 0.7956389 0.02634986         NA
## 83      1 0.45348785 0.9984602        NaN 0.7956389 0.02634986         NA
## 84      1 0.47508102 0.9984602        NaN 0.7956389 0.02634986         NA
## 85      1 0.49770236 0.9984602        NaN 0.7956389 0.02634986         NA
## 86      1 0.52140083 0.9984602        NaN 0.7956389 0.02634986         NA
## 87      1 0.54622772 0.9984602        NaN 0.7956389 0.02634986         NA
## 88      1 0.57223677 0.9984602        NaN 0.7956389 0.02634986         NA
## 89      1 0.59948425 0.9984602        NaN 0.7956389 0.02634986         NA
## 90      1 0.62802914 0.9984602        NaN 0.7956389 0.02634986         NA
## 91      1 0.65793322 0.9984602        NaN 0.7956389 0.02634986         NA
## 92      1 0.68926121 0.9984602        NaN 0.7956389 0.02634986         NA
## 93      1 0.72208090 0.9984602        NaN 0.7956389 0.02634986         NA
## 94      1 0.75646333 0.9984602        NaN 0.7956389 0.02634986         NA
## 95      1 0.79248290 0.9984602        NaN 0.7956389 0.02634986         NA
## 96      1 0.83021757 0.9984602        NaN 0.7956389 0.02634986         NA
## 97      1 0.86974900 0.9984602        NaN 0.7956389 0.02634986         NA
## 98      1 0.91116276 0.9984602        NaN 0.7956389 0.02634986         NA
## 99      1 0.95454846 0.9984602        NaN 0.7956389 0.02634986         NA
## 100     1 1.00000000 0.9984602        NaN 0.7956389 0.02634986         NA
##          MAESD
## 1   0.01205254
## 2   0.01205634
## 3   0.01206334
## 4   0.01206718
## 5   0.01207186
## 6   0.01207773
## 7   0.01208212
## 8   0.01208962
## 9   0.01208539
## 10  0.01207119
## 11  0.01203982
## 12  0.01201413
## 13  0.01201180
## 14  0.01203452
## 15  0.01204081
## 16  0.01204296
## 17  0.01206584
## 18  0.01207545
## 19  0.01205847
## 20  0.01204280
## 21  0.01202169
## 22  0.01199355
## 23  0.01196173
## 24  0.01193148
## 25  0.01189170
## 26  0.01184633
## 27  0.01180142
## 28  0.01176667
## 29  0.01173400
## 30  0.01170072
## 31  0.01167052
## 32  0.01162271
## 33  0.01160952
## 34  0.01162639
## 35  0.01164193
## 36  0.01165852
## 37  0.01164774
## 38  0.01164345
## 39  0.01163413
## 40  0.01164999
## 41  0.01166904
## 42  0.01168990
## 43  0.01172180
## 44  0.01177201
## 45  0.01183517
## 46  0.01191829
## 47  0.01200229
## 48  0.01209508
## 49  0.01219115
## 50  0.01229927
## 51  0.01242821
## 52  0.01257477
## 53  0.01272817
## 54  0.01289412
## 55  0.01305209
## 56  0.01332566
## 57  0.01355870
## 58  0.01381813
## 59  0.01408067
## 60  0.01440225
## 61  0.01457181
## 62  0.01481270
## 63  0.01491671
## 64  0.01518372
## 65  0.01528243
## 66  0.01549299
## 67  0.01568406
## 68  0.01601445
## 69  0.01603721
## 70  0.01634352
## 71  0.01670172
## 72  0.01712025
## 73  0.01759827
## 74  0.01810894
## 75  0.01868027
## 76  0.01929644
## 77  0.01994855
## 78  0.02060290
## 79  0.01968956
## 80  0.01955552
## 81  0.01955552
## 82  0.01955552
## 83  0.01955552
## 84  0.01955552
## 85  0.01955552
## 86  0.01955552
## 87  0.01955552
## 88  0.01955552
## 89  0.01955552
## 90  0.01955552
## 91  0.01955552
## 92  0.01955552
## 93  0.01955552
## 94  0.01955552
## 95  0.01955552
## 96  0.01955552
## 97  0.01955552
## 98  0.01955552
## 99  0.01955552
## 100 0.01955552
## Warning: Removed 21 rows containing missing values (geom_path).
## Warning: Removed 21 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LASSO.caret == TRUE){
  # Evaluate the caret-trained LASSO model on the held-out test set.
  # test.model is a project helper; presumably it prints a prediction summary
  # and the test MSE, and draws diagnostic plots -- confirm against its definition.
  test.model(model.LASSO.caret,
             data.test,
             method = "glmnet",
             subopt = "LASSO",
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##       Min.    1st Qu.     Median       Mean    3rd Qu.       Max. 
## -1.5867569 -0.3531352  0.0001909 -0.0149182  0.3386178  1.1951828 
## [1] "glmnet LASSO Test MSE: 0.74645428329447"

LASSO with CV (w/ filtered train)

Train

if (algo.LASSO.caret == TRUE){
  set.seed(1)
  # Cross-validated LASSO via caret (glmnet, subopt = 'LASSO'), this time on
  # the filtered training set (data.train2). Note this overwrites
  # model.LASSO.caret from the full-train run above.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "glmnet",
                                    subopt = "LASSO",
                                    feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5693 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5123, 5125, 5125, 5122, 5123, 5123, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE       Rsquared   MAE      
##   0.01000000  0.7300569  0.3649212  0.5989470
##   0.01047616  0.7302125  0.3647215  0.5991038
##   0.01097499  0.7304015  0.3644722  0.5992906
##   0.01149757  0.7306117  0.3641943  0.5995038
##   0.01204504  0.7308470  0.3638825  0.5997293
##   0.01261857  0.7310757  0.3635931  0.5999373
##   0.01321941  0.7313365  0.3632562  0.6001735
##   0.01384886  0.7315600  0.3629976  0.6003751
##   0.01450829  0.7317989  0.3627257  0.6005734
##   0.01519911  0.7320198  0.3625013  0.6007537
##   0.01592283  0.7322642  0.3622516  0.6009675
##   0.01668101  0.7325225  0.3619914  0.6011949
##   0.01747528  0.7328274  0.3616612  0.6014686
##   0.01830738  0.7331612  0.3612923  0.6017843
##   0.01917910  0.7335341  0.3608673  0.6021273
##   0.02009233  0.7339601  0.3603558  0.6025325
##   0.02104904  0.7344365  0.3597694  0.6029860
##   0.02205131  0.7349152  0.3591965  0.6034332
##   0.02310130  0.7354272  0.3585835  0.6039151
##   0.02420128  0.7359679  0.3579346  0.6044235
##   0.02535364  0.7365525  0.3572235  0.6049864
##   0.02656088  0.7371686  0.3564785  0.6055701
##   0.02782559  0.7378325  0.3556712  0.6061807
##   0.02915053  0.7385434  0.3548051  0.6068127
##   0.03053856  0.7392840  0.3539129  0.6074618
##   0.03199267  0.7400448  0.3530193  0.6081146
##   0.03351603  0.7408342  0.3521128  0.6087878
##   0.03511192  0.7416617  0.3511713  0.6094999
##   0.03678380  0.7425517  0.3501481  0.6102623
##   0.03853529  0.7435223  0.3490135  0.6111047
##   0.04037017  0.7445429  0.3478377  0.6119780
##   0.04229243  0.7456316  0.3465842  0.6129086
##   0.04430621  0.7468028  0.3452200  0.6139100
##   0.04641589  0.7479522  0.3439597  0.6148943
##   0.04862602  0.7491489  0.3426746  0.6159141
##   0.05094138  0.7503254  0.3415079  0.6169250
##   0.05336699  0.7515307  0.3403765  0.6179445
##   0.05590810  0.7527827  0.3392382  0.6190027
##   0.05857021  0.7541126  0.3380358  0.6201250
##   0.06135907  0.7554713  0.3368956  0.6212821
##   0.06428073  0.7569274  0.3356700  0.6225258
##   0.06734151  0.7583942  0.3345730  0.6237695
##   0.07054802  0.7598934  0.3335815  0.6250207
##   0.07390722  0.7615125  0.3324947  0.6263446
##   0.07742637  0.7632734  0.3312751  0.6277701
##   0.08111308  0.7651959  0.3298812  0.6292954
##   0.08497534  0.7672998  0.3282698  0.6309341
##   0.08902151  0.7696020  0.3264006  0.6327200
##   0.09326033  0.7721204  0.3242251  0.6346454
##   0.09770100  0.7748747  0.3216845  0.6367355
##   0.10235310  0.7778860  0.3187071  0.6389863
##   0.10722672  0.7811773  0.3152051  0.6414367
##   0.11233240  0.7847733  0.3110712  0.6441249
##   0.11768120  0.7886211  0.3064013  0.6470015
##   0.12328467  0.7926498  0.3013986  0.6500002
##   0.12915497  0.7966175  0.2968251  0.6529685
##   0.13530478  0.8006686  0.2922789  0.6559816
##   0.14174742  0.8050114  0.2871043  0.6592268
##   0.14849683  0.8097453  0.2808167  0.6627025
##   0.15556761  0.8149085  0.2731105  0.6664801
##   0.16297508  0.8205372  0.2636238  0.6705628
##   0.17073526  0.8265182  0.2524695  0.6748672
##   0.17886495  0.8328677  0.2393182  0.6794349
##   0.18738174  0.8386213  0.2280075  0.6835365
##   0.19630407  0.8438772  0.2183184  0.6872162
##   0.20565123  0.8486249  0.2109336  0.6905271
##   0.21544347  0.8534394  0.2032721  0.6938726
##   0.22570197  0.8586048  0.1936368  0.6974712
##   0.23644894  0.8640819  0.1816624  0.7012540
##   0.24770764  0.8683351  0.1760412  0.7041914
##   0.25950242  0.8719189  0.1750832  0.7066458
##   0.27185882  0.8756840  0.1749919  0.7092422
##   0.28480359  0.8797818  0.1749919  0.7121225
##   0.29836472  0.8842572  0.1749919  0.7152929
##   0.31257158  0.8891430  0.1749919  0.7187959
##   0.32745492  0.8944743  0.1749919  0.7226130
##   0.34304693  0.9002889  0.1749919  0.7267961
##   0.35938137  0.9066275  0.1749919  0.7313103
##   0.37649358  0.9133391  0.1661875  0.7361134
##   0.39442061  0.9157676        NaN  0.7378530
##   0.41320124  0.9157676        NaN  0.7378530
##   0.43287613  0.9157676        NaN  0.7378530
##   0.45348785  0.9157676        NaN  0.7378530
##   0.47508102  0.9157676        NaN  0.7378530
##   0.49770236  0.9157676        NaN  0.7378530
##   0.52140083  0.9157676        NaN  0.7378530
##   0.54622772  0.9157676        NaN  0.7378530
##   0.57223677  0.9157676        NaN  0.7378530
##   0.59948425  0.9157676        NaN  0.7378530
##   0.62802914  0.9157676        NaN  0.7378530
##   0.65793322  0.9157676        NaN  0.7378530
##   0.68926121  0.9157676        NaN  0.7378530
##   0.72208090  0.9157676        NaN  0.7378530
##   0.75646333  0.9157676        NaN  0.7378530
##   0.79248290  0.9157676        NaN  0.7378530
##   0.83021757  0.9157676        NaN  0.7378530
##   0.86974900  0.9157676        NaN  0.7378530
##   0.91116276  0.9157676        NaN  0.7378530
##   0.95454846  0.9157676        NaN  0.7378530
##   1.00000000  0.9157676        NaN  0.7378530
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda      RMSE  Rsquared       MAE     RMSESD RsquaredSD
## 1       1 0.01000000 0.7300569 0.3649212 0.5989470 0.02013901 0.01887969
## 2       1 0.01047616 0.7302125 0.3647215 0.5991038 0.02015218 0.01865761
## 3       1 0.01097499 0.7304015 0.3644722 0.5992906 0.02017753 0.01842683
## 4       1 0.01149757 0.7306117 0.3641943 0.5995038 0.02019906 0.01820390
## 5       1 0.01204504 0.7308470 0.3638825 0.5997293 0.02024497 0.01798917
## 6       1 0.01261857 0.7310757 0.3635931 0.5999373 0.02026158 0.01786115
## 7       1 0.01321941 0.7313365 0.3632562 0.6001735 0.02028413 0.01774860
## 8       1 0.01384886 0.7315600 0.3629976 0.6003751 0.02026013 0.01768358
## 9       1 0.01450829 0.7317989 0.3627257 0.6005734 0.02024008 0.01759055
## 10      1 0.01519911 0.7320198 0.3625013 0.6007537 0.02019186 0.01755504
## 11      1 0.01592283 0.7322642 0.3622516 0.6009675 0.02015790 0.01752877
## 12      1 0.01668101 0.7325225 0.3619914 0.6011949 0.02010608 0.01755861
## 13      1 0.01747528 0.7328274 0.3616612 0.6014686 0.02005514 0.01760403
## 14      1 0.01830738 0.7331612 0.3612923 0.6017843 0.01997867 0.01766266
## 15      1 0.01917910 0.7335341 0.3608673 0.6021273 0.01989168 0.01769709
## 16      1 0.02009233 0.7339601 0.3603558 0.6025325 0.01981395 0.01778449
## 17      1 0.02104904 0.7344365 0.3597694 0.6029860 0.01976759 0.01791081
## 18      1 0.02205131 0.7349152 0.3591965 0.6034332 0.01967640 0.01810086
## 19      1 0.02310130 0.7354272 0.3585835 0.6039151 0.01959774 0.01828333
## 20      1 0.02420128 0.7359679 0.3579346 0.6044235 0.01951109 0.01849900
## 21      1 0.02535364 0.7365525 0.3572235 0.6049864 0.01943450 0.01872238
## 22      1 0.02656088 0.7371686 0.3564785 0.6055701 0.01934744 0.01901354
## 23      1 0.02782559 0.7378325 0.3556712 0.6061807 0.01929076 0.01933112
## 24      1 0.02915053 0.7385434 0.3548051 0.6068127 0.01924031 0.01962039
## 25      1 0.03053856 0.7392840 0.3539129 0.6074618 0.01922121 0.01987854
## 26      1 0.03199267 0.7400448 0.3530193 0.6081146 0.01919621 0.02013961
## 27      1 0.03351603 0.7408342 0.3521128 0.6087878 0.01919127 0.02036903
## 28      1 0.03511192 0.7416617 0.3511713 0.6094999 0.01918628 0.02065708
## 29      1 0.03678380 0.7425517 0.3501481 0.6102623 0.01919729 0.02095487
## 30      1 0.03853529 0.7435223 0.3490135 0.6111047 0.01922335 0.02127278
## 31      1 0.04037017 0.7445429 0.3478377 0.6119780 0.01934032 0.02160920
## 32      1 0.04229243 0.7456316 0.3465842 0.6129086 0.01943594 0.02199059
## 33      1 0.04430621 0.7468028 0.3452200 0.6139100 0.01954013 0.02239037
## 34      1 0.04641589 0.7479522 0.3439597 0.6148943 0.01959391 0.02285037
## 35      1 0.04862602 0.7491489 0.3426746 0.6159141 0.01964524 0.02322746
## 36      1 0.05094138 0.7503254 0.3415079 0.6169250 0.01964515 0.02359057
## 37      1 0.05336699 0.7515307 0.3403765 0.6179445 0.01966784 0.02383247
## 38      1 0.05590810 0.7527827 0.3392382 0.6190027 0.01962915 0.02410810
## 39      1 0.05857021 0.7541126 0.3380358 0.6201250 0.01958542 0.02437795
## 40      1 0.06135907 0.7554713 0.3368956 0.6212821 0.01949745 0.02481481
## 41      1 0.06428073 0.7569274 0.3356700 0.6225258 0.01940228 0.02525891
## 42      1 0.06734151 0.7583942 0.3345730 0.6237695 0.01926723 0.02568710
## 43      1 0.07054802 0.7598934 0.3335815 0.6250207 0.01917844 0.02599666
## 44      1 0.07390722 0.7615125 0.3324947 0.6263446 0.01908423 0.02635145
## 45      1 0.07742637 0.7632734 0.3312751 0.6277701 0.01899289 0.02674255
## 46      1 0.08111308 0.7651959 0.3298812 0.6292954 0.01889941 0.02720296
## 47      1 0.08497534 0.7672998 0.3282698 0.6309341 0.01880447 0.02772249
## 48      1 0.08902151 0.7696020 0.3264006 0.6327200 0.01870870 0.02830884
## 49      1 0.09326033 0.7721204 0.3242251 0.6346454 0.01861246 0.02897114
## 50      1 0.09770100 0.7748747 0.3216845 0.6367355 0.01851622 0.02971954
## 51      1 0.10235310 0.7778860 0.3187071 0.6389863 0.01842050 0.03056511
## 52      1 0.10722672 0.7811773 0.3152051 0.6414367 0.01832593 0.03151958
## 53      1 0.11233240 0.7847733 0.3110712 0.6441249 0.01823324 0.03259489
## 54      1 0.11768120 0.7886211 0.3064013 0.6470015 0.01818317 0.03363465
## 55      1 0.12328467 0.7926498 0.3013986 0.6500002 0.01835225 0.03422372
## 56      1 0.12915497 0.7966175 0.2968251 0.6529685 0.01837719 0.03489991
## 57      1 0.13530478 0.8006686 0.2922789 0.6559816 0.01847591 0.03518739
## 58      1 0.14174742 0.8050114 0.2871043 0.6592268 0.01855694 0.03597420
## 59      1 0.14849683 0.8097453 0.2808167 0.6627025 0.01865265 0.03685698
## 60      1 0.15556761 0.8149085 0.2731105 0.6664801 0.01876904 0.03781767
## 61      1 0.16297508 0.8205372 0.2636238 0.6705628 0.01890877 0.03883585
## 62      1 0.17073526 0.8265182 0.2524695 0.6748672 0.01906913 0.03973329
## 63      1 0.17886495 0.8328677 0.2393182 0.6794349 0.01935394 0.04024461
## 64      1 0.18738174 0.8386213 0.2280075 0.6835365 0.01927362 0.04153130
## 65      1 0.19630407 0.8438772 0.2183184 0.6872162 0.01998430 0.03951080
## 66      1 0.20565123 0.8486249 0.2109336 0.6905271 0.02036327 0.04015452
## 67      1 0.21544347 0.8534394 0.2032721 0.6938726 0.02080244 0.04022799
## 68      1 0.22570197 0.8586048 0.1936368 0.6974712 0.02124193 0.04005153
## 69      1 0.23644894 0.8640819 0.1816624 0.7012540 0.02187787 0.03912107
## 70      1 0.24770764 0.8683351 0.1760412 0.7041914 0.02186196 0.04063697
## 71      1 0.25950242 0.8719189 0.1750832 0.7066458 0.02240949 0.03919870
## 72      1 0.27185882 0.8756840 0.1749919 0.7092422 0.02293721 0.03921811
## 73      1 0.28480359 0.8797818 0.1749919 0.7121225 0.02349154 0.03921811
## 74      1 0.29836472 0.8842572 0.1749919 0.7152929 0.02408241 0.03921811
## 75      1 0.31257158 0.8891430 0.1749919 0.7187959 0.02471116 0.03921811
## 76      1 0.32745492 0.8944743 0.1749919 0.7226130 0.02537908 0.03921811
## 77      1 0.34304693 0.9002889 0.1749919 0.7267961 0.02608736 0.03921811
## 78      1 0.35938137 0.9066275 0.1749919 0.7313103 0.02683709 0.03921811
## 79      1 0.37649358 0.9133391 0.1661875 0.7361134 0.02751735 0.02929608
## 80      1 0.39442061 0.9157676       NaN 0.7378530 0.02605539         NA
## 81      1 0.41320124 0.9157676       NaN 0.7378530 0.02605539         NA
## 82      1 0.43287613 0.9157676       NaN 0.7378530 0.02605539         NA
## 83      1 0.45348785 0.9157676       NaN 0.7378530 0.02605539         NA
## 84      1 0.47508102 0.9157676       NaN 0.7378530 0.02605539         NA
## 85      1 0.49770236 0.9157676       NaN 0.7378530 0.02605539         NA
## 86      1 0.52140083 0.9157676       NaN 0.7378530 0.02605539         NA
## 87      1 0.54622772 0.9157676       NaN 0.7378530 0.02605539         NA
## 88      1 0.57223677 0.9157676       NaN 0.7378530 0.02605539         NA
## 89      1 0.59948425 0.9157676       NaN 0.7378530 0.02605539         NA
## 90      1 0.62802914 0.9157676       NaN 0.7378530 0.02605539         NA
## 91      1 0.65793322 0.9157676       NaN 0.7378530 0.02605539         NA
## 92      1 0.68926121 0.9157676       NaN 0.7378530 0.02605539         NA
## 93      1 0.72208090 0.9157676       NaN 0.7378530 0.02605539         NA
## 94      1 0.75646333 0.9157676       NaN 0.7378530 0.02605539         NA
## 95      1 0.79248290 0.9157676       NaN 0.7378530 0.02605539         NA
## 96      1 0.83021757 0.9157676       NaN 0.7378530 0.02605539         NA
## 97      1 0.86974900 0.9157676       NaN 0.7378530 0.02605539         NA
## 98      1 0.91116276 0.9157676       NaN 0.7378530 0.02605539         NA
## 99      1 0.95454846 0.9157676       NaN 0.7378530 0.02605539         NA
## 100     1 1.00000000 0.9157676       NaN 0.7378530 0.02605539         NA
##          MAESD
## 1   0.01913813
## 2   0.01907666
## 3   0.01902119
## 4   0.01895436
## 5   0.01890457
## 6   0.01882308
## 7   0.01875094
## 8   0.01865105
## 9   0.01857558
## 10  0.01847809
## 11  0.01838357
## 12  0.01829671
## 13  0.01820469
## 14  0.01809055
## 15  0.01796924
## 16  0.01785736
## 17  0.01776233
## 18  0.01761696
## 19  0.01747109
## 20  0.01731265
## 21  0.01713065
## 22  0.01693613
## 23  0.01676096
## 24  0.01661810
## 25  0.01652040
## 26  0.01642862
## 27  0.01635515
## 28  0.01626974
## 29  0.01620442
## 30  0.01616270
## 31  0.01620285
## 32  0.01621955
## 33  0.01624510
## 34  0.01624558
## 35  0.01623953
## 36  0.01618725
## 37  0.01613709
## 38  0.01605914
## 39  0.01598116
## 40  0.01588126
## 41  0.01577457
## 42  0.01561303
## 43  0.01547585
## 44  0.01533902
## 45  0.01520797
## 46  0.01505537
## 47  0.01488049
## 48  0.01471437
## 49  0.01454046
## 50  0.01432635
## 51  0.01409384
## 52  0.01384257
## 53  0.01361304
## 54  0.01340342
## 55  0.01335652
## 56  0.01321036
## 57  0.01310888
## 58  0.01298463
## 59  0.01286326
## 60  0.01274094
## 61  0.01260910
## 62  0.01246792
## 63  0.01244643
## 64  0.01219213
## 65  0.01253552
## 66  0.01266788
## 67  0.01281701
## 68  0.01296021
## 69  0.01326117
## 70  0.01313304
## 71  0.01347772
## 72  0.01379904
## 73  0.01417516
## 74  0.01461508
## 75  0.01510787
## 76  0.01562952
## 77  0.01619032
## 78  0.01678875
## 79  0.01728340
## 80  0.01626863
## 81  0.01626863
## 82  0.01626863
## 83  0.01626863
## 84  0.01626863
## 85  0.01626863
## 86  0.01626863
## 87  0.01626863
## 88  0.01626863
## 89  0.01626863
## 90  0.01626863
## 91  0.01626863
## 92  0.01626863
## 93  0.01626863
## 94  0.01626863
## 95  0.01626863
## 96  0.01626863
## 97  0.01626863
## 98  0.01626863
## 99  0.01626863
## 100 0.01626863
## Warning: Removed 21 rows containing missing values (geom_path).
## Warning: Removed 21 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-trained LASSO (glmnet) model on the held-out test set.
# isTRUE() is safer than `== TRUE` inside if(): a missing/NA flag skips the
# block instead of raising "missing value where TRUE/FALSE needed".
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet', subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # NOTE(review): `t` is presumably a transformation flag defined
             # upstream — confirm it is not accidentally base::t (transpose).
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.92385 -0.43064 -0.05892 -0.06930  0.30571  1.33337 
## [1] "glmnet LASSO Test MSE: 0.753377147868002"

LARS with CV (w/ full train)

Train

# Train a Least Angle Regression (lars) model via caret with 10-fold CV on
# the full training set; the fitted caret model is kept for the test section.
# isTRUE() replaces `== TRUE`, which would error inside if() on an NA flag.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train
                                   ,method = "lars"
                                   # NOTE(review): the *string* 'NULL' is passed here,
                                   # while the matching test.model() calls pass the
                                   # object NULL — confirm train.caret.glmselect()
                                   # treats the string as "no sub-option".
                                   ,subopt = 'NULL'
                                   ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.404 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE      
##   0.00000000  0.9984602        NaN  0.7956389
##   0.01010101  0.9853864  0.1383305  0.7860684
##   0.02020202  0.9735618  0.1383305  0.7775336
##   0.03030303  0.9630318  0.1383305  0.7699120
##   0.04040404  0.9540794  0.1451066  0.7634413
##   0.05050505  0.9457713  0.1600677  0.7575523
##   0.06060606  0.9381359  0.1705514  0.7521378
##   0.07070707  0.9311237  0.1825313  0.7470656
##   0.08080808  0.9245430  0.1942890  0.7421679
##   0.09090909  0.9181742  0.2053561  0.7372521
##   0.10101010  0.9121518  0.2142557  0.7325163
##   0.11111111  0.9064828  0.2213635  0.7279702
##   0.12121212  0.9012123  0.2271768  0.7237019
##   0.13131313  0.8963583  0.2328907  0.7197642
##   0.14141414  0.8916759  0.2386085  0.7160013
##   0.15151515  0.8872755  0.2434034  0.7124292
##   0.16161616  0.8831667  0.2473780  0.7090381
##   0.17171717  0.8793537  0.2506574  0.7058432
##   0.18181818  0.8758404  0.2533493  0.7028189
##   0.19191919  0.8726304  0.2555463  0.6999908
##   0.20202020  0.8697614  0.2573124  0.6973552
##   0.21212121  0.8672770  0.2589044  0.6949879
##   0.22222222  0.8650826  0.2607638  0.6928541
##   0.23232323  0.8631397  0.2625807  0.6909500
##   0.24242424  0.8614078  0.2643740  0.6892150
##   0.25252525  0.8598043  0.2661329  0.6875901
##   0.26262626  0.8583103  0.2678305  0.6860748
##   0.27272727  0.8570109  0.2692425  0.6847491
##   0.28282828  0.8560200  0.2702487  0.6837151
##   0.29292929  0.8552210  0.2710414  0.6829195
##   0.30303030  0.8545685  0.2717156  0.6822948
##   0.31313131  0.8540313  0.2723008  0.6817644
##   0.32323232  0.8536204  0.2727079  0.6813087
##   0.33333333  0.8532796  0.2730276  0.6809228
##   0.34343434  0.8529506  0.2733510  0.6805513
##   0.35353535  0.8526654  0.2736253  0.6801858
##   0.36363636  0.8524303  0.2738378  0.6798640
##   0.37373737  0.8522403  0.2739917  0.6795933
##   0.38383838  0.8521239  0.2740303  0.6793866
##   0.39393939  0.8520759  0.2739644  0.6792575
##   0.40404040  0.8520563  0.2738619  0.6791467
##   0.41414141  0.8520588  0.2737339  0.6790641
##   0.42424242  0.8520975  0.2735548  0.6790253
##   0.43434343  0.8521754  0.2733234  0.6790304
##   0.44444444  0.8522733  0.2730668  0.6790596
##   0.45454545  0.8523682  0.2728249  0.6790959
##   0.46464646  0.8524641  0.2725901  0.6791348
##   0.47474747  0.8525684  0.2723466  0.6791790
##   0.48484848  0.8526770  0.2721016  0.6792361
##   0.49494949  0.8527917  0.2718514  0.6792919
##   0.50505051  0.8529179  0.2715858  0.6793530
##   0.51515152  0.8530524  0.2713107  0.6794187
##   0.52525253  0.8531889  0.2710374  0.6794874
##   0.53535354  0.8533348  0.2707518  0.6795619
##   0.54545455  0.8534803  0.2704718  0.6796332
##   0.55555556  0.8536340  0.2701833  0.6797136
##   0.56565657  0.8537940  0.2698888  0.6797983
##   0.57575758  0.8539625  0.2695839  0.6798917
##   0.58585859  0.8541330  0.2692799  0.6799950
##   0.59595960  0.8543141  0.2689617  0.6801113
##   0.60606061  0.8545007  0.2686378  0.6802348
##   0.61616162  0.8546941  0.2683070  0.6803725
##   0.62626263  0.8548912  0.2679748  0.6805150
##   0.63636364  0.8550968  0.2676335  0.6806649
##   0.64646465  0.8553038  0.2672950  0.6808144
##   0.65656566  0.8555147  0.2669547  0.6809561
##   0.66666667  0.8557193  0.2666297  0.6810921
##   0.67676768  0.8559293  0.2662998  0.6812326
##   0.68686869  0.8561396  0.2659726  0.6813744
##   0.69696970  0.8563516  0.2656467  0.6815165
##   0.70707071  0.8565691  0.2653159  0.6816548
##   0.71717172  0.8567860  0.2649875  0.6817940
##   0.72727273  0.8569992  0.2646677  0.6819296
##   0.73737374  0.8572152  0.2643463  0.6820680
##   0.74747475  0.8574337  0.2640249  0.6822107
##   0.75757576  0.8576582  0.2636978  0.6823626
##   0.76767677  0.8578872  0.2633675  0.6825193
##   0.77777778  0.8581189  0.2630367  0.6826791
##   0.78787879  0.8583614  0.2626915  0.6828480
##   0.79797980  0.8586122  0.2623369  0.6830262
##   0.80808081  0.8588657  0.2619826  0.6832098
##   0.81818182  0.8591205  0.2616300  0.6833961
##   0.82828283  0.8593795  0.2612751  0.6835908
##   0.83838384  0.8596402  0.2609209  0.6837858
##   0.84848485  0.8599080  0.2605585  0.6839893
##   0.85858586  0.8601813  0.2601900  0.6841973
##   0.86868687  0.8604595  0.2598174  0.6844098
##   0.87878788  0.8607467  0.2594346  0.6846275
##   0.88888889  0.8610367  0.2590501  0.6848456
##   0.89898990  0.8613302  0.2586636  0.6850622
##   0.90909091  0.8616242  0.2582792  0.6852801
##   0.91919192  0.8619177  0.2578996  0.6854957
##   0.92929293  0.8622149  0.2575184  0.6857125
##   0.93939394  0.8625193  0.2571292  0.6859309
##   0.94949495  0.8628250  0.2567415  0.6861488
##   0.95959596  0.8631353  0.2563493  0.6863690
##   0.96969697  0.8634466  0.2559587  0.6865938
##   0.97979798  0.8637615  0.2555660  0.6868255
##   0.98989899  0.8640802  0.2551702  0.6870634
##   1.00000000  0.8644000  0.2547759  0.6873056
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.4040404.

##     fraction
## 41 0.4040404
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-trained LARS model (fit on the full training set) on the
# held-out test set. isTRUE() is safer than `== TRUE` inside if(): a
# missing/NA flag skips the block instead of erroring.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # NOTE(review): `t` is presumably a transformation flag defined
             # upstream — confirm it is not accidentally base::t (transpose).
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##       Min.    1st Qu.     Median       Mean    3rd Qu.       Max. 
## -1.5858261 -0.3532425 -0.0002983 -0.0149202  0.3393275  1.1950036 
## [1] "lars  Test MSE: 0.746443158612502"

LARS with CV (w/ filtered train)

Train

# Re-train the LARS model via caret with 10-fold CV, this time on the
# outlier-filtered training set (data.train2). Note this OVERWRITES
# model.LARS.caret from the full-train fit above.
# isTRUE() replaces `== TRUE`, which would error inside if() on an NA flag.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train2
                                   ,method = "lars"
                                   # NOTE(review): the *string* 'NULL' is passed here,
                                   # while the matching test.model() calls pass the
                                   # object NULL — confirm train.caret.glmselect()
                                   # treats the string as "no sub-option".
                                   ,subopt = 'NULL'
                                   ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.636 on full training set
## Least Angle Regression 
## 
## 5693 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5123, 5125, 5125, 5122, 5123, 5123, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE      
##   0.00000000  0.9157676        NaN  0.7378530
##   0.01010101  0.8998940  0.1749919  0.7264847
##   0.02020202  0.8855247  0.1749919  0.7161765
##   0.03030303  0.8727339  0.1749919  0.7071902
##   0.04040404  0.8616646  0.1865211  0.6996360
##   0.05050505  0.8513742  0.2065669  0.6924733
##   0.06060606  0.8420055  0.2207189  0.6859574
##   0.07070707  0.8333704  0.2378053  0.6798252
##   0.08080808  0.8250851  0.2550096  0.6738602
##   0.09090909  0.8171322  0.2694270  0.6681046
##   0.10101010  0.8096047  0.2809403  0.6625977
##   0.11111111  0.8025146  0.2900732  0.6573620
##   0.12121212  0.7959213  0.2973875  0.6524352
##   0.13131313  0.7898308  0.3047739  0.6478973
##   0.14141414  0.7839817  0.3120972  0.6435353
##   0.15151515  0.7784963  0.3181662  0.6394424
##   0.16161616  0.7733823  0.3231708  0.6356079
##   0.17171717  0.7686472  0.3272771  0.6319850
##   0.18181818  0.7642979  0.3306276  0.6285951
##   0.19191919  0.7603570  0.3333418  0.6254136
##   0.20202020  0.7569158  0.3357804  0.6225210
##   0.21212121  0.7538388  0.3383595  0.6198990
##   0.22222222  0.7510899  0.3408345  0.6175702
##   0.23232323  0.7486233  0.3433442  0.6154681
##   0.24242424  0.7463306  0.3459075  0.6134941
##   0.25252525  0.7441939  0.3483684  0.6116588
##   0.26262626  0.7422575  0.3506229  0.6099976
##   0.27272727  0.7406049  0.3524977  0.6085770
##   0.28282828  0.7392509  0.3540561  0.6074070
##   0.29292929  0.7380375  0.3555306  0.6063200
##   0.30303030  0.7370324  0.3567517  0.6053969
##   0.31313131  0.7362278  0.3577190  0.6046308
##   0.32323232  0.7355409  0.3585495  0.6039814
##   0.33333333  0.7349360  0.3592752  0.6034242
##   0.34343434  0.7343984  0.3599193  0.6029130
##   0.35353535  0.7338885  0.3605464  0.6024435
##   0.36363636  0.7334656  0.3610417  0.6020597
##   0.37373737  0.7331105  0.3614431  0.6017461
##   0.38383838  0.7327903  0.3618064  0.6014481
##   0.39393939  0.7325168  0.3621035  0.6011877
##   0.40404040  0.7322946  0.3623239  0.6009911
##   0.41414141  0.7321123  0.3624884  0.6008366
##   0.42424242  0.7319419  0.3626453  0.6006871
##   0.43434343  0.7317774  0.3628033  0.6005502
##   0.44444444  0.7316017  0.3629956  0.6004077
##   0.45454545  0.7314255  0.3631988  0.6002577
##   0.46464646  0.7312469  0.3634146  0.6001002
##   0.47474747  0.7310683  0.3636384  0.5999419
##   0.48484848  0.7308989  0.3638514  0.5997884
##   0.49494949  0.7307292  0.3640709  0.5996326
##   0.50505051  0.7305676  0.3642816  0.5994777
##   0.51515152  0.7304153  0.3644811  0.5993191
##   0.52525253  0.7302670  0.3646793  0.5991639
##   0.53535354  0.7301324  0.3648575  0.5990291
##   0.54545455  0.7300164  0.3650071  0.5989124
##   0.55555556  0.7299210  0.3651240  0.5988122
##   0.56565657  0.7298444  0.3652133  0.5987350
##   0.57575758  0.7297743  0.3652962  0.5986692
##   0.58585859  0.7297118  0.3653703  0.5986125
##   0.59595960  0.7296563  0.3654369  0.5985631
##   0.60606061  0.7296114  0.3654887  0.5985174
##   0.61616162  0.7295662  0.3655463  0.5984755
##   0.62626263  0.7295360  0.3655818  0.5984489
##   0.63636364  0.7295160  0.3656038  0.5984331
##   0.64646465  0.7295164  0.3655942  0.5984319
##   0.65656566  0.7295310  0.3655640  0.5984410
##   0.66666667  0.7295623  0.3655095  0.5984520
##   0.67676768  0.7296068  0.3654358  0.5984704
##   0.68686869  0.7296606  0.3653495  0.5984969
##   0.69696970  0.7297263  0.3652464  0.5985334
##   0.70707071  0.7298092  0.3651174  0.5985815
##   0.71717172  0.7299029  0.3649748  0.5986390
##   0.72727273  0.7300044  0.3648234  0.5987069
##   0.73737374  0.7301190  0.3646537  0.5987859
##   0.74747475  0.7302490  0.3644616  0.5988812
##   0.75757576  0.7303860  0.3642629  0.5989834
##   0.76767677  0.7305362  0.3640456  0.5990915
##   0.77777778  0.7306969  0.3638150  0.5992060
##   0.78787879  0.7308662  0.3635731  0.5993290
##   0.79797980  0.7310455  0.3633182  0.5994640
##   0.80808081  0.7312368  0.3630466  0.5996056
##   0.81818182  0.7314332  0.3627706  0.5997458
##   0.82828283  0.7316402  0.3624805  0.5999009
##   0.83838384  0.7318566  0.3621788  0.6000627
##   0.84848485  0.7320858  0.3618598  0.6002333
##   0.85858586  0.7323278  0.3615239  0.6004096
##   0.86868687  0.7325765  0.3611805  0.6005951
##   0.87878788  0.7328305  0.3608311  0.6007827
##   0.88888889  0.7330980  0.3604633  0.6009834
##   0.89898990  0.7333718  0.3600891  0.6011896
##   0.90909091  0.7336483  0.3597143  0.6014028
##   0.91919192  0.7339368  0.3593236  0.6016225
##   0.92929293  0.7342349  0.3589216  0.6018458
##   0.93939394  0.7345392  0.3585131  0.6020755
##   0.94949495  0.7348502  0.3580984  0.6023115
##   0.95959596  0.7351740  0.3576665  0.6025551
##   0.96969697  0.7355091  0.3572206  0.6028053
##   0.97979798  0.7358554  0.3567607  0.6030638
##   0.98989899  0.7362102  0.3562913  0.6033226
##   1.00000000  0.7365714  0.3558164  0.6035841
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.6363636.

##     fraction
## 64 0.6363636
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-trained LARS model (fit on the filtered training set) on
# the held-out test set. isTRUE() is safer than `== TRUE` inside if(): a
# missing/NA flag skips the block instead of erroring.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # NOTE(review): `t` is presumably a transformation flag defined
             # upstream — confirm it is not accidentally base::t (transpose).
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.99969 -0.44259 -0.05504 -0.06924  0.31704  1.39209 
## [1] "lars  Test MSE: 0.756885383759297"